summaryrefslogtreecommitdiffstats
path: root/testing/web-platform/tests/tools/third_party/pytest-asyncio
diff options
context:
space:
mode:
authorDaniel Baumann <daniel.baumann@progress-linux.org>2024-04-07 19:33:14 +0000
committerDaniel Baumann <daniel.baumann@progress-linux.org>2024-04-07 19:33:14 +0000
commit36d22d82aa202bb199967e9512281e9a53db42c9 (patch)
tree105e8c98ddea1c1e4784a60a5a6410fa416be2de /testing/web-platform/tests/tools/third_party/pytest-asyncio
parentInitial commit. (diff)
downloadfirefox-esr-36d22d82aa202bb199967e9512281e9a53db42c9.tar.xz
firefox-esr-36d22d82aa202bb199967e9512281e9a53db42c9.zip
Adding upstream version 115.7.0esr.upstream/115.7.0esrupstream
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'testing/web-platform/tests/tools/third_party/pytest-asyncio')
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest-asyncio/CHANGELOG.rst169
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest-asyncio/LICENSE201
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest-asyncio/MANIFEST.in5
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest-asyncio/Makefile39
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest-asyncio/PKG-INFO285
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest-asyncio/README.rst259
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest-asyncio/dependencies/default/constraints.txt24
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest-asyncio/dependencies/default/requirements.txt4
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest-asyncio/dependencies/pytest-min/constraints.txt22
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest-asyncio/dependencies/pytest-min/requirements.txt4
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest-asyncio/pyproject.toml10
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest-asyncio/pytest_asyncio.egg-info/PKG-INFO285
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest-asyncio/pytest_asyncio.egg-info/SOURCES.txt51
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest-asyncio/pytest_asyncio.egg-info/dependency_links.txt1
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest-asyncio/pytest_asyncio.egg-info/entry_points.txt2
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest-asyncio/pytest_asyncio.egg-info/requires.txt11
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest-asyncio/pytest_asyncio.egg-info/top_level.txt1
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest-asyncio/pytest_asyncio/__init__.py5
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest-asyncio/pytest_asyncio/_version.py5
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest-asyncio/pytest_asyncio/plugin.py546
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest-asyncio/pytest_asyncio/py.typed0
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest-asyncio/setup.cfg73
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/async_fixtures/__init__.py0
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/async_fixtures/test_async_fixtures.py25
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/async_fixtures/test_async_fixtures_scope.py25
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/async_fixtures/test_async_fixtures_with_finalizer.py59
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/async_fixtures/test_async_gen_fixtures.py38
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/async_fixtures/test_nested.py26
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/async_fixtures/test_parametrized_loop.py31
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/conftest.py32
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/hypothesis/test_base.py88
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/hypothesis/test_inherited_test.py20
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/loop_fixture_scope/conftest.py17
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/loop_fixture_scope/test_loop_fixture_scope.py16
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/markers/test_class_marker.py25
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/markers/test_module_marker.py39
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/modes/test_auto_mode.py139
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/modes/test_legacy_mode.py112
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/modes/test_strict_mode.py68
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/multiloop/conftest.py15
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/multiloop/test_alternative_loops.py16
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/respect_event_loop_policy/conftest.py16
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/respect_event_loop_policy/test_respects_event_loop_policy.py17
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/test_asyncio_fixture.py64
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/test_dependent_fixtures.py14
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/test_event_loop_scope.py37
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/test_flaky_integration.py43
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/test_simple.py275
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/test_subprocess.py36
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/trio/test_fixtures.py25
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest-asyncio/tools/get-version.py17
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest-asyncio/tox.ini56
52 files changed, 3393 insertions, 0 deletions
diff --git a/testing/web-platform/tests/tools/third_party/pytest-asyncio/CHANGELOG.rst b/testing/web-platform/tests/tools/third_party/pytest-asyncio/CHANGELOG.rst
new file mode 100644
index 0000000000..8de226c4f9
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest-asyncio/CHANGELOG.rst
@@ -0,0 +1,169 @@
+=========
+Changelog
+=========
+
+0.19.0 (22-07-13)
+=================
+- BREAKING: The default ``asyncio_mode`` is now *strict*. `#293 <https://github.com/pytest-dev/pytest-asyncio/issues/293>`_
+- Removes `setup.py` since all relevant configuration is present in `setup.cfg`. Users requiring an editable installation of pytest-asyncio need to use pip v21.1 or newer. `#283 <https://github.com/pytest-dev/pytest-asyncio/issues/283>`_
+- Declare support for Python 3.11.
+
+0.18.3 (22-03-25)
+=================
+- Adds `pytest-trio <https://pypi.org/project/pytest-trio/>`_ to the test dependencies
+- Fixes a bug that caused pytest-asyncio to try to set up async pytest_trio fixtures in strict mode. `#298 <https://github.com/pytest-dev/pytest-asyncio/issues/298>`_
+
+0.18.2 (22-03-03)
+=================
+- Fix asyncio auto mode not marking static methods. `#295 <https://github.com/pytest-dev/pytest-asyncio/issues/295>`_
+- Fix a compatibility issue with Hypothesis 6.39.0. `#302 <https://github.com/pytest-dev/pytest-asyncio/issues/302>`_
+
+0.18.1 (22-02-10)
+=================
+- Fixes a regression that prevented async fixtures from working in synchronous tests. `#286 <https://github.com/pytest-dev/pytest-asyncio/issues/286>`_
+
+0.18.0 (22-02-07)
+=================
+
+- Raise a warning if @pytest.mark.asyncio is applied to non-async function. `#275 <https://github.com/pytest-dev/pytest-asyncio/issues/275>`_
+- Support parametrized ``event_loop`` fixture. `#278 <https://github.com/pytest-dev/pytest-asyncio/issues/278>`_
+
+0.17.2 (22-01-17)
+=================
+
+- Require ``typing-extensions`` on Python<3.8 only. `#269 <https://github.com/pytest-dev/pytest-asyncio/issues/269>`_
+- Fix a regression in tests collection introduced by 0.17.1, the plugin works fine with non-python tests again. `#267 <https://github.com/pytest-dev/pytest-asyncio/issues/267>`_
+
+
+0.17.1 (22-01-16)
+=================
+- Fixes a bug that prevents async Hypothesis tests from working without explicit ``asyncio`` marker when ``--asyncio-mode=auto`` is set. `#258 <https://github.com/pytest-dev/pytest-asyncio/issues/258>`_
+- Fixed a bug that closes the default event loop if the loop doesn't exist `#257 <https://github.com/pytest-dev/pytest-asyncio/issues/257>`_
+- Added type annotations. `#198 <https://github.com/pytest-dev/pytest-asyncio/issues/198>`_
+- Show asyncio mode in pytest report headers. `#266 <https://github.com/pytest-dev/pytest-asyncio/issues/266>`_
+- Relax ``asyncio_mode`` type definition; it allows to support pytest 6.1+. `#262 <https://github.com/pytest-dev/pytest-asyncio/issues/262>`_
+
+0.17.0 (22-01-13)
+=================
+- `pytest-asyncio` no longer alters existing event loop policies. `#168 <https://github.com/pytest-dev/pytest-asyncio/issues/168>`_, `#188 <https://github.com/pytest-dev/pytest-asyncio/issues/188>`_
+- Drop support for Python 3.6
+- Fixed an issue when pytest-asyncio was used in combination with `flaky` or inherited asynchronous Hypothesis tests. `#178 <https://github.com/pytest-dev/pytest-asyncio/issues/178>`_ `#231 <https://github.com/pytest-dev/pytest-asyncio/issues/231>`_
+- Added `flaky <https://pypi.org/project/flaky/>`_ to test dependencies
+- Added ``unused_udp_port`` and ``unused_udp_port_factory`` fixtures (similar to ``unused_tcp_port`` and ``unused_tcp_port_factory`` counterparts). `#99 <https://github.com/pytest-dev/pytest-asyncio/issues/99>`_
+- Added the plugin modes: *strict*, *auto*, and *legacy*. See `documentation <https://github.com/pytest-dev/pytest-asyncio#modes>`_ for details. `#125 <https://github.com/pytest-dev/pytest-asyncio/issues/125>`_
+- Correctly process ``KeyboardInterrupt`` during async fixture setup phase `#219 <https://github.com/pytest-dev/pytest-asyncio/issues/219>`_
+
+0.16.0 (2021-10-16)
+===================
+- Add support for Python 3.10
+
+0.15.1 (2021-04-22)
+===================
+- Hotfix for errors while closing event loops while replacing them.
+ `#209 <https://github.com/pytest-dev/pytest-asyncio/issues/209>`_
+ `#210 <https://github.com/pytest-dev/pytest-asyncio/issues/210>`_
+
+0.15.0 (2021-04-19)
+===================
+- Add support for Python 3.9
+- Abandon support for Python 3.5. If you still require support for Python 3.5, please use pytest-asyncio v0.14 or earlier.
+- Set ``unused_tcp_port_factory`` fixture scope to 'session'.
+ `#163 <https://github.com/pytest-dev/pytest-asyncio/pull/163>`_
+- Properly close event loops when replacing them.
+ `#208 <https://github.com/pytest-dev/pytest-asyncio/issues/208>`_
+
+0.14.0 (2020-06-24)
+===================
+- Fix `#162 <https://github.com/pytest-dev/pytest-asyncio/issues/162>`_, and ``event_loop`` fixture behavior now is coherent on all scopes.
+ `#164 <https://github.com/pytest-dev/pytest-asyncio/pull/164>`_
+
+0.12.0 (2020-05-04)
+===================
+- Run the event loop fixture as soon as possible. This helps with fixtures that have an implicit dependency on the event loop.
+ `#156 <https://github.com/pytest-dev/pytest-asyncio/pull/156>`_
+
+0.11.0 (2020-04-20)
+===================
+- Test on 3.8, drop 3.3 and 3.4. Stick to 0.10 for these versions.
+ `#152 <https://github.com/pytest-dev/pytest-asyncio/pull/152>`_
+- Use the new Pytest 5.4.0 Function API. We therefore depend on pytest >= 5.4.0.
+ `#142 <https://github.com/pytest-dev/pytest-asyncio/pull/142>`_
+- Better ``pytest.skip`` support.
+ `#126 <https://github.com/pytest-dev/pytest-asyncio/pull/126>`_
+
+0.10.0 (2019-01-08)
+====================
+- ``pytest-asyncio`` integrates with `Hypothesis <https://hypothesis.readthedocs.io>`_
+ to support ``@given`` on async test functions using ``asyncio``.
+ `#102 <https://github.com/pytest-dev/pytest-asyncio/pull/102>`_
+- Pytest 4.1 support.
+ `#105 <https://github.com/pytest-dev/pytest-asyncio/pull/105>`_
+
+0.9.0 (2018-07-28)
+==================
+- Python 3.7 support.
+- Remove ``event_loop_process_pool`` fixture and
+ ``pytest.mark.asyncio_process_pool`` marker (see
+ https://bugs.python.org/issue34075 for deprecation and removal details)
+
+0.8.0 (2017-09-23)
+==================
+- Improve integration with other packages (like aiohttp) with more careful event loop handling.
+ `#64 <https://github.com/pytest-dev/pytest-asyncio/pull/64>`_
+
+0.7.0 (2017-09-08)
+==================
+- Python versions pre-3.6 can use the async_generator library for async fixtures.
+  `#62 <https://github.com/pytest-dev/pytest-asyncio/pull/62>`_
+
+0.6.0 (2017-05-28)
+==================
+- Support for Python versions pre-3.5 has been dropped.
+- ``pytestmark`` now works on both module and class level.
+- The ``forbid_global_loop`` parameter has been removed.
+- Support for async and async gen fixtures has been added.
+ `#45 <https://github.com/pytest-dev/pytest-asyncio/pull/45>`_
+- The deprecation warning regarding ``asyncio.async()`` has been fixed.
+ `#51 <https://github.com/pytest-dev/pytest-asyncio/pull/51>`_
+
+0.5.0 (2016-09-07)
+==================
+- Introduced a changelog.
+ `#31 <https://github.com/pytest-dev/pytest-asyncio/issues/31>`_
+- The ``event_loop`` fixture is again responsible for closing itself.
+ This makes the fixture slightly harder to correctly override, but enables
+ other fixtures to depend on it correctly.
+ `#30 <https://github.com/pytest-dev/pytest-asyncio/issues/30>`_
+- Deal with the event loop policy by wrapping a special pytest hook,
+ ``pytest_fixture_setup``. This allows setting the policy before fixtures
+ dependent on the ``event_loop`` fixture run, thus allowing them to take
+ advantage of the ``forbid_global_loop`` parameter. As a consequence of this,
+ we now depend on pytest 3.0.
+ `#29 <https://github.com/pytest-dev/pytest-asyncio/issues/29>`_
+
+0.4.1 (2016-06-01)
+==================
+- Fix a bug preventing the propagation of exceptions from the plugin.
+ `#25 <https://github.com/pytest-dev/pytest-asyncio/issues/25>`_
+
+0.4.0 (2016-05-30)
+==================
+- Make ``event_loop`` fixtures simpler to override by closing them in the
+ plugin, instead of directly in the fixture.
+ `#21 <https://github.com/pytest-dev/pytest-asyncio/pull/21>`_
+- Introduce the ``forbid_global_loop`` parameter.
+ `#21 <https://github.com/pytest-dev/pytest-asyncio/pull/21>`_
+
+0.3.0 (2015-12-19)
+==================
+- Support for Python 3.5 ``async``/``await`` syntax.
+ `#17 <https://github.com/pytest-dev/pytest-asyncio/pull/17>`_
+
+0.2.0 (2015-08-01)
+==================
+- ``unused_tcp_port_factory`` fixture.
+ `#10 <https://github.com/pytest-dev/pytest-asyncio/issues/10>`_
+
+0.1.1 (2015-04-23)
+==================
+Initial release.
diff --git a/testing/web-platform/tests/tools/third_party/pytest-asyncio/LICENSE b/testing/web-platform/tests/tools/third_party/pytest-asyncio/LICENSE
new file mode 100644
index 0000000000..5c304d1a4a
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest-asyncio/LICENSE
@@ -0,0 +1,201 @@
+Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "{}"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright {yyyy} {name of copyright owner}
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/testing/web-platform/tests/tools/third_party/pytest-asyncio/MANIFEST.in b/testing/web-platform/tests/tools/third_party/pytest-asyncio/MANIFEST.in
new file mode 100644
index 0000000000..fdf813e915
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest-asyncio/MANIFEST.in
@@ -0,0 +1,5 @@
+include CHANGELOG.rst
+
+recursive-exclude .github *
+exclude .gitignore
+exclude .pre-commit-config.yaml
diff --git a/testing/web-platform/tests/tools/third_party/pytest-asyncio/Makefile b/testing/web-platform/tests/tools/third_party/pytest-asyncio/Makefile
new file mode 100644
index 0000000000..2b0216f99e
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest-asyncio/Makefile
@@ -0,0 +1,39 @@
+.PHONY: clean clean-build clean-pyc clean-test lint test
+
+clean: clean-build clean-pyc clean-test ## remove all build, test, coverage and Python artifacts
+
+clean-build: ## remove build artifacts
+ rm -fr build/
+ rm -fr dist/
+ rm -fr .eggs/
+ find . -name '*.egg-info' -exec rm -fr {} +
+ find . -name '*.egg' -exec rm -f {} +
+
+clean-pyc: ## remove Python file artifacts
+ find . -name '*.pyc' -exec rm -f {} +
+ find . -name '*.pyo' -exec rm -f {} +
+ find . -name '*~' -exec rm -f {} +
+ find . -name '__pycache__' -exec rm -fr {} +
+
+clean-test: ## remove test and coverage artifacts
+ rm -fr .tox/
+ rm -f .coverage
+ rm -fr htmlcov/
+
+lint:
+# CI env-var is set by GitHub actions
+ifdef CI
+ python -m pre_commit run --all-files --show-diff-on-failure
+else
+ python -m pre_commit run --all-files
+endif
+ python -m mypy pytest_asyncio --show-error-codes
+
+test:
+ coverage run -m pytest tests
+ coverage xml
+ coverage report
+
+install:
+ pip install -U pre-commit
+ pre-commit install
diff --git a/testing/web-platform/tests/tools/third_party/pytest-asyncio/PKG-INFO b/testing/web-platform/tests/tools/third_party/pytest-asyncio/PKG-INFO
new file mode 100644
index 0000000000..19acaa4d51
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest-asyncio/PKG-INFO
@@ -0,0 +1,285 @@
+Metadata-Version: 2.1
+Name: pytest-asyncio
+Version: 0.19.0
+Summary: Pytest support for asyncio
+Home-page: https://github.com/pytest-dev/pytest-asyncio
+Author: Tin Tvrtković <tinchester@gmail.com>
+Author-email: tinchester@gmail.com
+License: Apache 2.0
+Project-URL: GitHub, https://github.com/pytest-dev/pytest-asyncio
+Classifier: Development Status :: 4 - Beta
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Topic :: Software Development :: Testing
+Classifier: Framework :: AsyncIO
+Classifier: Framework :: Pytest
+Classifier: Typing :: Typed
+Requires-Python: >=3.7
+Description-Content-Type: text/x-rst
+Provides-Extra: testing
+License-File: LICENSE
+
+pytest-asyncio: pytest support for asyncio
+==========================================
+
+.. image:: https://img.shields.io/pypi/v/pytest-asyncio.svg
+ :target: https://pypi.python.org/pypi/pytest-asyncio
+.. image:: https://github.com/pytest-dev/pytest-asyncio/workflows/CI/badge.svg
+ :target: https://github.com/pytest-dev/pytest-asyncio/actions?workflow=CI
+.. image:: https://codecov.io/gh/pytest-dev/pytest-asyncio/branch/master/graph/badge.svg
+ :target: https://codecov.io/gh/pytest-dev/pytest-asyncio
+.. image:: https://img.shields.io/pypi/pyversions/pytest-asyncio.svg
+ :target: https://github.com/pytest-dev/pytest-asyncio
+ :alt: Supported Python versions
+.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
+ :target: https://github.com/ambv/black
+
+pytest-asyncio is an Apache2 licensed library, written in Python, for testing
+asyncio code with pytest.
+
+asyncio code is usually written in the form of coroutines, which makes it
+slightly more difficult to test using normal testing tools. pytest-asyncio
+provides useful fixtures and markers to make testing easier.
+
+.. code-block:: python
+
+ @pytest.mark.asyncio
+ async def test_some_asyncio_code():
+ res = await library.do_something()
+ assert b"expected result" == res
+
+pytest-asyncio has been strongly influenced by pytest-tornado_.
+
+.. _pytest-tornado: https://github.com/eugeniy/pytest-tornado
+
+Features
+--------
+
+- fixtures for creating and injecting versions of the asyncio event loop
+- fixtures for injecting unused tcp/udp ports
+- pytest markers for treating tests as asyncio coroutines
+- easy testing with non-default event loops
+- support for ``async def`` fixtures and async generator fixtures
+- support *auto* mode to handle all async fixtures and tests automatically by asyncio;
+ provide *strict* mode if a test suite should work with different async frameworks
+ simultaneously, e.g. ``asyncio`` and ``trio``.
+
+Installation
+------------
+
+To install pytest-asyncio, simply:
+
+.. code-block:: bash
+
+ $ pip install pytest-asyncio
+
+This is enough for pytest to pick up pytest-asyncio.
+
+Modes
+-----
+
+Starting from ``pytest-asyncio>=0.17``, three modes are provided: *auto*, *strict* and
+*legacy*. Starting from ``pytest-asyncio>=0.19`` the *strict* mode is the default.
+
+The mode can be set by ``asyncio_mode`` configuration option in `configuration file
+<https://docs.pytest.org/en/latest/reference/customize.html>`_:
+
+.. code-block:: ini
+
+ # pytest.ini
+ [pytest]
+ asyncio_mode = auto
+
+The value can be overridden by command-line option for ``pytest`` invocation:
+
+.. code-block:: bash
+
+ $ pytest tests --asyncio-mode=strict
+
+Auto mode
+~~~~~~~~~
+
+When the mode is auto, all discovered *async* tests are considered *asyncio-driven* even
+if they have no ``@pytest.mark.asyncio`` marker.
+
+All async fixtures are considered *asyncio-driven* as well, even if they are decorated
+with a regular ``@pytest.fixture`` decorator instead of dedicated
+``@pytest_asyncio.fixture`` counterpart.
+
+*asyncio-driven* means that tests and fixtures are executed by ``pytest-asyncio``
+plugin.
+
+This mode requires the simplest tests and fixtures configuration and is
+recommended for default usage *unless* the same project and its test suite should
+execute tests from different async frameworks, e.g. ``asyncio`` and ``trio``. In this
+case, auto-handling can break tests designed for another framework; please use *strict*
+mode instead.
+
+Strict mode
+~~~~~~~~~~~
+
+Strict mode enforces ``@pytest.mark.asyncio`` and ``@pytest_asyncio.fixture`` usage.
+Without these markers, tests and fixtures are not considered *asyncio-driven*; other
+pytest plugins can handle them.
+
+Please use this mode if multiple async frameworks should be combined in the same test
+suite.
+
+This mode is used by default for the sake of project inter-compatibility.
+
+
+Legacy mode
+~~~~~~~~~~~
+
+This mode follows rules used by ``pytest-asyncio<0.17``: tests are not auto-marked but
+fixtures are.
+
+Deprecation warnings are emitted with a suggestion to either switch to ``auto`` mode
+or use ``strict`` mode with ``@pytest_asyncio.fixture`` decorators.
+
+The default was changed to ``strict`` in ``pytest-asyncio>=0.19``.
+
+
+Fixtures
+--------
+
+``event_loop``
+~~~~~~~~~~~~~~
+Creates a new asyncio event loop based on the current event loop policy. The new loop
+is available as the return value of this fixture or via `asyncio.get_running_loop <https://docs.python.org/3/library/asyncio-eventloop.html#asyncio.get_running_loop>`__.
+The event loop is closed when the fixture scope ends. The fixture scope defaults
+to ``function`` scope.
+
+Note that just using the ``event_loop`` fixture won't make your test function
+a coroutine. You'll need to interact with the event loop directly, using methods
+like ``event_loop.run_until_complete``. See the ``pytest.mark.asyncio`` marker
+for treating test functions like coroutines.
+
+.. code-block:: python
+
+ def test_http_client(event_loop):
+ url = "http://httpbin.org/get"
+ resp = event_loop.run_until_complete(http_client(url))
+ assert b"HTTP/1.1 200 OK" in resp
+
+The ``event_loop`` fixture can be overridden in any of the standard pytest locations,
+e.g. directly in the test file, or in ``conftest.py``. This allows redefining the
+fixture scope, for example:
+
+.. code-block:: python
+
+ @pytest.fixture(scope="session")
+ def event_loop():
+ policy = asyncio.get_event_loop_policy()
+ loop = policy.new_event_loop()
+ yield loop
+ loop.close()
+
+If you need to change the type of the event loop, prefer setting a custom event loop policy over redefining the ``event_loop`` fixture.
+
+If the ``pytest.mark.asyncio`` marker is applied to a test function, the ``event_loop``
+fixture will be requested automatically by the test function.
+
+``unused_tcp_port``
+~~~~~~~~~~~~~~~~~~~
+Finds and yields a single unused TCP port on the localhost interface. Useful for
+binding temporary test servers.
+
+``unused_tcp_port_factory``
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+A callable which returns a different unused TCP port each invocation. Useful
+when several unused TCP ports are required in a test.
+
+.. code-block:: python
+
+ def a_test(unused_tcp_port_factory):
+ port1, port2 = unused_tcp_port_factory(), unused_tcp_port_factory()
+ ...
+
+``unused_udp_port`` and ``unused_udp_port_factory``
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+Work just like their TCP counterparts but return unused UDP ports.
+
+
+Async fixtures
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+Asynchronous fixtures are defined just like ordinary pytest fixtures, except they should be decorated with ``@pytest_asyncio.fixture``.
+
+.. code-block:: python3
+
+ import pytest_asyncio
+
+
+ @pytest_asyncio.fixture
+ async def async_gen_fixture():
+ await asyncio.sleep(0.1)
+ yield "a value"
+
+
+ @pytest_asyncio.fixture(scope="module")
+ async def async_fixture():
+ return await asyncio.sleep(0.1)
+
+All scopes are supported, but if you use a non-function scope you will need
+to redefine the ``event_loop`` fixture to have the same or broader scope.
+Async fixtures need the event loop, and so must have the same or narrower scope
+than the ``event_loop`` fixture.
+
+*auto* and *legacy* modes automatically convert async fixtures declared with the
+standard ``@pytest.fixture`` decorator to *asyncio-driven* versions.
+
+
+Markers
+-------
+
+``pytest.mark.asyncio``
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+Mark your test coroutine with this marker and pytest will execute it as an
+asyncio task using the event loop provided by the ``event_loop`` fixture. See
+the introductory section for an example.
+
+The event loop used can be overridden by overriding the ``event_loop`` fixture
+(see above).
+
+In order to make your test code a little more concise, the pytest |pytestmark|_
+feature can be used to mark entire modules or classes with this marker.
+Only test coroutines will be affected (by default, coroutines prefixed by
+``test_``), so, for example, fixtures are safe to define.
+
+.. code-block:: python
+
+ import asyncio
+
+ import pytest
+
+ # All test coroutines will be treated as marked.
+ pytestmark = pytest.mark.asyncio
+
+
+ async def test_example(event_loop):
+ """No marker!"""
+ await asyncio.sleep(0, loop=event_loop)
+
+In *auto* mode, the ``pytest.mark.asyncio`` marker can be omitted, the marker is added
+automatically to *async* test functions.
+
+
+.. |pytestmark| replace:: ``pytestmark``
+.. _pytestmark: http://doc.pytest.org/en/latest/example/markers.html#marking-whole-classes-or-modules
+
+Note about unittest
+-------------------
+
+Test classes subclassing the standard `unittest <https://docs.python.org/3/library/unittest.html>`__ library are not supported, users
+are recommended to use `unittest.IsolatedAsyncioTestCase <https://docs.python.org/3/library/unittest.html#unittest.IsolatedAsyncioTestCase>`__
+or an async framework such as `asynctest <https://asynctest.readthedocs.io/en/latest>`__.
+
+Contributing
+------------
+Contributions are very welcome. Tests can be run with ``tox``, please ensure
+the coverage at least stays the same before you submit a pull request.
diff --git a/testing/web-platform/tests/tools/third_party/pytest-asyncio/README.rst b/testing/web-platform/tests/tools/third_party/pytest-asyncio/README.rst
new file mode 100644
index 0000000000..1fc5ef4738
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest-asyncio/README.rst
@@ -0,0 +1,259 @@
+pytest-asyncio: pytest support for asyncio
+==========================================
+
+.. image:: https://img.shields.io/pypi/v/pytest-asyncio.svg
+ :target: https://pypi.python.org/pypi/pytest-asyncio
+.. image:: https://github.com/pytest-dev/pytest-asyncio/workflows/CI/badge.svg
+ :target: https://github.com/pytest-dev/pytest-asyncio/actions?workflow=CI
+.. image:: https://codecov.io/gh/pytest-dev/pytest-asyncio/branch/master/graph/badge.svg
+ :target: https://codecov.io/gh/pytest-dev/pytest-asyncio
+.. image:: https://img.shields.io/pypi/pyversions/pytest-asyncio.svg
+ :target: https://github.com/pytest-dev/pytest-asyncio
+ :alt: Supported Python versions
+.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
+ :target: https://github.com/ambv/black
+
+pytest-asyncio is an Apache2 licensed library, written in Python, for testing
+asyncio code with pytest.
+
+asyncio code is usually written in the form of coroutines, which makes it
+slightly more difficult to test using normal testing tools. pytest-asyncio
+provides useful fixtures and markers to make testing easier.
+
+.. code-block:: python
+
+ @pytest.mark.asyncio
+ async def test_some_asyncio_code():
+ res = await library.do_something()
+ assert b"expected result" == res
+
+pytest-asyncio has been strongly influenced by pytest-tornado_.
+
+.. _pytest-tornado: https://github.com/eugeniy/pytest-tornado
+
+Features
+--------
+
+- fixtures for creating and injecting versions of the asyncio event loop
+- fixtures for injecting unused tcp/udp ports
+- pytest markers for treating tests as asyncio coroutines
+- easy testing with non-default event loops
+- support for ``async def`` fixtures and async generator fixtures
+- support *auto* mode to handle all async fixtures and tests automatically by asyncio;
+ provide *strict* mode if a test suite should work with different async frameworks
+ simultaneously, e.g. ``asyncio`` and ``trio``.
+
+Installation
+------------
+
+To install pytest-asyncio, simply:
+
+.. code-block:: bash
+
+ $ pip install pytest-asyncio
+
+This is enough for pytest to pick up pytest-asyncio.
+
+Modes
+-----
+
+Starting from ``pytest-asyncio>=0.17``, three modes are provided: *auto*, *strict* and
+*legacy*. Starting from ``pytest-asyncio>=0.19`` the *strict* mode is the default.
+
+The mode can be set by ``asyncio_mode`` configuration option in `configuration file
+<https://docs.pytest.org/en/latest/reference/customize.html>`_:
+
+.. code-block:: ini
+
+ # pytest.ini
+ [pytest]
+ asyncio_mode = auto
+
+The value can be overridden by command-line option for ``pytest`` invocation:
+
+.. code-block:: bash
+
+ $ pytest tests --asyncio-mode=strict
+
+Auto mode
+~~~~~~~~~
+
+When the mode is auto, all discovered *async* tests are considered *asyncio-driven* even
+if they have no ``@pytest.mark.asyncio`` marker.
+
+All async fixtures are considered *asyncio-driven* as well, even if they are decorated
+with a regular ``@pytest.fixture`` decorator instead of dedicated
+``@pytest_asyncio.fixture`` counterpart.
+
+*asyncio-driven* means that tests and fixtures are executed by ``pytest-asyncio``
+plugin.
+
+This mode requires the simplest tests and fixtures configuration and is
+recommended for default usage *unless* the same project and its test suite should
+execute tests from different async frameworks, e.g. ``asyncio`` and ``trio``. In this
+case, auto-handling can break tests designed for another framework; please use *strict*
+mode instead.
+
+Strict mode
+~~~~~~~~~~~
+
+Strict mode enforces ``@pytest.mark.asyncio`` and ``@pytest_asyncio.fixture`` usage.
+Without these markers, tests and fixtures are not considered *asyncio-driven*; other
+pytest plugins can handle them.
+
+Please use this mode if multiple async frameworks should be combined in the same test
+suite.
+
+This mode is used by default for the sake of project inter-compatibility.
+
+
+Legacy mode
+~~~~~~~~~~~
+
+This mode follows rules used by ``pytest-asyncio<0.17``: tests are not auto-marked but
+fixtures are.
+
+Deprecation warnings are emitted with a suggestion to either switch to ``auto`` mode
+or use ``strict`` mode with ``@pytest_asyncio.fixture`` decorators.
+
+The default was changed to ``strict`` in ``pytest-asyncio>=0.19``.
+
+
+Fixtures
+--------
+
+``event_loop``
+~~~~~~~~~~~~~~
+Creates a new asyncio event loop based on the current event loop policy. The new loop
+is available as the return value of this fixture or via `asyncio.get_running_loop <https://docs.python.org/3/library/asyncio-eventloop.html#asyncio.get_running_loop>`__.
+The event loop is closed when the fixture scope ends. The fixture scope defaults
+to ``function`` scope.
+
+Note that just using the ``event_loop`` fixture won't make your test function
+a coroutine. You'll need to interact with the event loop directly, using methods
+like ``event_loop.run_until_complete``. See the ``pytest.mark.asyncio`` marker
+for treating test functions like coroutines.
+
+.. code-block:: python
+
+ def test_http_client(event_loop):
+ url = "http://httpbin.org/get"
+ resp = event_loop.run_until_complete(http_client(url))
+ assert b"HTTP/1.1 200 OK" in resp
+
+The ``event_loop`` fixture can be overridden in any of the standard pytest locations,
+e.g. directly in the test file, or in ``conftest.py``. This allows redefining the
+fixture scope, for example:
+
+.. code-block:: python
+
+ @pytest.fixture(scope="session")
+ def event_loop():
+ policy = asyncio.get_event_loop_policy()
+ loop = policy.new_event_loop()
+ yield loop
+ loop.close()
+
+If you need to change the type of the event loop, prefer setting a custom event loop policy over redefining the ``event_loop`` fixture.
+
+If the ``pytest.mark.asyncio`` marker is applied to a test function, the ``event_loop``
+fixture will be requested automatically by the test function.
+
+``unused_tcp_port``
+~~~~~~~~~~~~~~~~~~~
+Finds and yields a single unused TCP port on the localhost interface. Useful for
+binding temporary test servers.
+
+``unused_tcp_port_factory``
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+A callable which returns a different unused TCP port each invocation. Useful
+when several unused TCP ports are required in a test.
+
+.. code-block:: python
+
+ def a_test(unused_tcp_port_factory):
+ port1, port2 = unused_tcp_port_factory(), unused_tcp_port_factory()
+ ...
+
+``unused_udp_port`` and ``unused_udp_port_factory``
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+Work just like their TCP counterparts but return unused UDP ports.
+
+
+Async fixtures
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+Asynchronous fixtures are defined just like ordinary pytest fixtures, except they should be decorated with ``@pytest_asyncio.fixture``.
+
+.. code-block:: python3
+
+ import pytest_asyncio
+
+
+ @pytest_asyncio.fixture
+ async def async_gen_fixture():
+ await asyncio.sleep(0.1)
+ yield "a value"
+
+
+ @pytest_asyncio.fixture(scope="module")
+ async def async_fixture():
+ return await asyncio.sleep(0.1)
+
+All scopes are supported, but if you use a non-function scope you will need
+to redefine the ``event_loop`` fixture to have the same or broader scope.
+Async fixtures need the event loop, and so must have the same or narrower scope
+than the ``event_loop`` fixture.
+
+*auto* and *legacy* modes automatically convert async fixtures declared with the
+standard ``@pytest.fixture`` decorator to *asyncio-driven* versions.
+
+
+Markers
+-------
+
+``pytest.mark.asyncio``
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+Mark your test coroutine with this marker and pytest will execute it as an
+asyncio task using the event loop provided by the ``event_loop`` fixture. See
+the introductory section for an example.
+
+The event loop used can be overridden by overriding the ``event_loop`` fixture
+(see above).
+
+In order to make your test code a little more concise, the pytest |pytestmark|_
+feature can be used to mark entire modules or classes with this marker.
+Only test coroutines will be affected (by default, coroutines prefixed by
+``test_``), so, for example, fixtures are safe to define.
+
+.. code-block:: python
+
+ import asyncio
+
+ import pytest
+
+ # All test coroutines will be treated as marked.
+ pytestmark = pytest.mark.asyncio
+
+
+ async def test_example(event_loop):
+ """No marker!"""
+ await asyncio.sleep(0, loop=event_loop)
+
+In *auto* mode, the ``pytest.mark.asyncio`` marker can be omitted, the marker is added
+automatically to *async* test functions.
+
+
+.. |pytestmark| replace:: ``pytestmark``
+.. _pytestmark: http://doc.pytest.org/en/latest/example/markers.html#marking-whole-classes-or-modules
+
+Note about unittest
+-------------------
+
+Test classes subclassing the standard `unittest <https://docs.python.org/3/library/unittest.html>`__ library are not supported, users
+are recommended to use `unittest.IsolatedAsyncioTestCase <https://docs.python.org/3/library/unittest.html#unittest.IsolatedAsyncioTestCase>`__
+or an async framework such as `asynctest <https://asynctest.readthedocs.io/en/latest>`__.
+
+Contributing
+------------
+Contributions are very welcome. Tests can be run with ``tox``, please ensure
+the coverage at least stays the same before you submit a pull request.
diff --git a/testing/web-platform/tests/tools/third_party/pytest-asyncio/dependencies/default/constraints.txt b/testing/web-platform/tests/tools/third_party/pytest-asyncio/dependencies/default/constraints.txt
new file mode 100644
index 0000000000..cd99339f5e
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest-asyncio/dependencies/default/constraints.txt
@@ -0,0 +1,24 @@
+async-generator==1.10
+attrs==21.4.0
+coverage==6.4.1
+flaky==3.7.0
+hypothesis==6.48.3
+idna==3.3
+importlib-metadata==4.12.0
+iniconfig==1.1.1
+mypy==0.961
+mypy-extensions==0.4.3
+outcome==1.2.0
+packaging==21.3
+pluggy==1.0.0
+py==1.11.0
+pyparsing==3.0.9
+pytest==7.1.2
+pytest-trio==0.7.0
+sniffio==1.2.0
+sortedcontainers==2.4.0
+tomli==2.0.1
+trio==0.21.0
+typed-ast==1.5.4
+typing_extensions==4.3.0
+zipp==3.8.0
diff --git a/testing/web-platform/tests/tools/third_party/pytest-asyncio/dependencies/default/requirements.txt b/testing/web-platform/tests/tools/third_party/pytest-asyncio/dependencies/default/requirements.txt
new file mode 100644
index 0000000000..01b2484e6b
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest-asyncio/dependencies/default/requirements.txt
@@ -0,0 +1,4 @@
+# Always adjust install_requires in setup.cfg and pytest-min-requirements.txt
+# when changing runtime dependencies
+pytest >= 6.1.0
+typing-extensions >= 3.7.2; python_version < "3.8"
diff --git a/testing/web-platform/tests/tools/third_party/pytest-asyncio/dependencies/pytest-min/constraints.txt b/testing/web-platform/tests/tools/third_party/pytest-asyncio/dependencies/pytest-min/constraints.txt
new file mode 100644
index 0000000000..33f7948f4c
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest-asyncio/dependencies/pytest-min/constraints.txt
@@ -0,0 +1,22 @@
+async-generator==1.10
+attrs==21.4.0
+coverage==6.3.2
+flaky==3.7.0
+hypothesis==6.43.3
+idna==3.3
+iniconfig==1.1.1
+mypy==0.942
+mypy-extensions==0.4.3
+outcome==1.1.0
+packaging==21.3
+pluggy==0.13.1
+py==1.11.0
+pyparsing==3.0.8
+pytest==6.1.0
+pytest-trio==0.7.0
+sniffio==1.2.0
+sortedcontainers==2.4.0
+toml==0.10.2
+tomli==2.0.1
+trio==0.20.0
+typing_extensions==4.2.0
diff --git a/testing/web-platform/tests/tools/third_party/pytest-asyncio/dependencies/pytest-min/requirements.txt b/testing/web-platform/tests/tools/third_party/pytest-asyncio/dependencies/pytest-min/requirements.txt
new file mode 100644
index 0000000000..4fc6ef2fa3
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest-asyncio/dependencies/pytest-min/requirements.txt
@@ -0,0 +1,4 @@
+# Always adjust install_requires in setup.cfg and requirements.txt
+# when changing minimum version dependencies
+pytest == 6.1.0
+typing-extensions >= 3.7.2; python_version < "3.8"
diff --git a/testing/web-platform/tests/tools/third_party/pytest-asyncio/pyproject.toml b/testing/web-platform/tests/tools/third_party/pytest-asyncio/pyproject.toml
new file mode 100644
index 0000000000..81540a53a6
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest-asyncio/pyproject.toml
@@ -0,0 +1,10 @@
+[build-system]
+requires = [
+ "setuptools>=51.0",
+ "wheel>=0.36",
+ "setuptools_scm[toml]>=6.2"
+]
+build-backend = "setuptools.build_meta"
+
+[tool.setuptools_scm]
+write_to = "pytest_asyncio/_version.py"
diff --git a/testing/web-platform/tests/tools/third_party/pytest-asyncio/pytest_asyncio.egg-info/PKG-INFO b/testing/web-platform/tests/tools/third_party/pytest-asyncio/pytest_asyncio.egg-info/PKG-INFO
new file mode 100644
index 0000000000..19acaa4d51
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest-asyncio/pytest_asyncio.egg-info/PKG-INFO
@@ -0,0 +1,285 @@
+Metadata-Version: 2.1
+Name: pytest-asyncio
+Version: 0.19.0
+Summary: Pytest support for asyncio
+Home-page: https://github.com/pytest-dev/pytest-asyncio
+Author: Tin Tvrtković <tinchester@gmail.com>
+Author-email: tinchester@gmail.com
+License: Apache 2.0
+Project-URL: GitHub, https://github.com/pytest-dev/pytest-asyncio
+Classifier: Development Status :: 4 - Beta
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Topic :: Software Development :: Testing
+Classifier: Framework :: AsyncIO
+Classifier: Framework :: Pytest
+Classifier: Typing :: Typed
+Requires-Python: >=3.7
+Description-Content-Type: text/x-rst
+Provides-Extra: testing
+License-File: LICENSE
+
+pytest-asyncio: pytest support for asyncio
+==========================================
+
+.. image:: https://img.shields.io/pypi/v/pytest-asyncio.svg
+ :target: https://pypi.python.org/pypi/pytest-asyncio
+.. image:: https://github.com/pytest-dev/pytest-asyncio/workflows/CI/badge.svg
+ :target: https://github.com/pytest-dev/pytest-asyncio/actions?workflow=CI
+.. image:: https://codecov.io/gh/pytest-dev/pytest-asyncio/branch/master/graph/badge.svg
+ :target: https://codecov.io/gh/pytest-dev/pytest-asyncio
+.. image:: https://img.shields.io/pypi/pyversions/pytest-asyncio.svg
+ :target: https://github.com/pytest-dev/pytest-asyncio
+ :alt: Supported Python versions
+.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
+ :target: https://github.com/ambv/black
+
+pytest-asyncio is an Apache2 licensed library, written in Python, for testing
+asyncio code with pytest.
+
+asyncio code is usually written in the form of coroutines, which makes it
+slightly more difficult to test using normal testing tools. pytest-asyncio
+provides useful fixtures and markers to make testing easier.
+
+.. code-block:: python
+
+ @pytest.mark.asyncio
+ async def test_some_asyncio_code():
+ res = await library.do_something()
+ assert b"expected result" == res
+
+pytest-asyncio has been strongly influenced by pytest-tornado_.
+
+.. _pytest-tornado: https://github.com/eugeniy/pytest-tornado
+
+Features
+--------
+
+- fixtures for creating and injecting versions of the asyncio event loop
+- fixtures for injecting unused tcp/udp ports
+- pytest markers for treating tests as asyncio coroutines
+- easy testing with non-default event loops
+- support for ``async def`` fixtures and async generator fixtures
+- support *auto* mode to handle all async fixtures and tests automatically by asyncio;
+ provide *strict* mode if a test suite should work with different async frameworks
+ simultaneously, e.g. ``asyncio`` and ``trio``.
+
+Installation
+------------
+
+To install pytest-asyncio, simply:
+
+.. code-block:: bash
+
+ $ pip install pytest-asyncio
+
+This is enough for pytest to pick up pytest-asyncio.
+
+Modes
+-----
+
+Starting from ``pytest-asyncio>=0.17``, three modes are provided: *auto*, *strict* and
+*legacy*. Starting from ``pytest-asyncio>=0.19`` the *strict* mode is the default.
+
+The mode can be set by ``asyncio_mode`` configuration option in `configuration file
+<https://docs.pytest.org/en/latest/reference/customize.html>`_:
+
+.. code-block:: ini
+
+ # pytest.ini
+ [pytest]
+ asyncio_mode = auto
+
+The value can be overridden by command-line option for ``pytest`` invocation:
+
+.. code-block:: bash
+
+ $ pytest tests --asyncio-mode=strict
+
+Auto mode
+~~~~~~~~~
+
+When the mode is auto, all discovered *async* tests are considered *asyncio-driven* even
+if they have no ``@pytest.mark.asyncio`` marker.
+
+All async fixtures are considered *asyncio-driven* as well, even if they are decorated
+with a regular ``@pytest.fixture`` decorator instead of dedicated
+``@pytest_asyncio.fixture`` counterpart.
+
+*asyncio-driven* means that tests and fixtures are executed by ``pytest-asyncio``
+plugin.
+
+This mode requires the simplest tests and fixtures configuration and is
+recommended for default usage *unless* the same project and its test suite should
+execute tests from different async frameworks, e.g. ``asyncio`` and ``trio``. In this
+case, auto-handling can break tests designed for another framework; please use *strict*
+mode instead.
+
+Strict mode
+~~~~~~~~~~~
+
+Strict mode enforces ``@pytest.mark.asyncio`` and ``@pytest_asyncio.fixture`` usage.
+Without these markers, tests and fixtures are not considered *asyncio-driven*; other
+pytest plugins can handle them.
+
+Please use this mode if multiple async frameworks should be combined in the same test
+suite.
+
+This mode is used by default for the sake of project inter-compatibility.
+
+
+Legacy mode
+~~~~~~~~~~~
+
+This mode follows rules used by ``pytest-asyncio<0.17``: tests are not auto-marked but
+fixtures are.
+
+Deprecation warnings are emitted with a suggestion to either switch to ``auto`` mode
+or use ``strict`` mode with ``@pytest_asyncio.fixture`` decorators.
+
+The default was changed to ``strict`` in ``pytest-asyncio>=0.19``.
+
+
+Fixtures
+--------
+
+``event_loop``
+~~~~~~~~~~~~~~
+Creates a new asyncio event loop based on the current event loop policy. The new loop
+is available as the return value of this fixture or via `asyncio.get_running_loop <https://docs.python.org/3/library/asyncio-eventloop.html#asyncio.get_running_loop>`__.
+The event loop is closed when the fixture scope ends. The fixture scope defaults
+to ``function`` scope.
+
+Note that just using the ``event_loop`` fixture won't make your test function
+a coroutine. You'll need to interact with the event loop directly, using methods
+like ``event_loop.run_until_complete``. See the ``pytest.mark.asyncio`` marker
+for treating test functions like coroutines.
+
+.. code-block:: python
+
+ def test_http_client(event_loop):
+ url = "http://httpbin.org/get"
+ resp = event_loop.run_until_complete(http_client(url))
+ assert b"HTTP/1.1 200 OK" in resp
+
+The ``event_loop`` fixture can be overridden in any of the standard pytest locations,
+e.g. directly in the test file, or in ``conftest.py``. This allows redefining the
+fixture scope, for example:
+
+.. code-block:: python
+
+ @pytest.fixture(scope="session")
+ def event_loop():
+ policy = asyncio.get_event_loop_policy()
+ loop = policy.new_event_loop()
+ yield loop
+ loop.close()
+
+If you need to change the type of the event loop, prefer setting a custom event loop policy over redefining the ``event_loop`` fixture.
+
+If the ``pytest.mark.asyncio`` marker is applied to a test function, the ``event_loop``
+fixture will be requested automatically by the test function.
+
+``unused_tcp_port``
+~~~~~~~~~~~~~~~~~~~
+Finds and yields a single unused TCP port on the localhost interface. Useful for
+binding temporary test servers.
+
+``unused_tcp_port_factory``
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+A callable which returns a different unused TCP port each invocation. Useful
+when several unused TCP ports are required in a test.
+
+.. code-block:: python
+
+ def a_test(unused_tcp_port_factory):
+ port1, port2 = unused_tcp_port_factory(), unused_tcp_port_factory()
+ ...
+
+``unused_udp_port`` and ``unused_udp_port_factory``
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+Work just like their TCP counterparts but return unused UDP ports.
+
+
+Async fixtures
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+Asynchronous fixtures are defined just like ordinary pytest fixtures, except they should be decorated with ``@pytest_asyncio.fixture``.
+
+.. code-block:: python3
+
+ import pytest_asyncio
+
+
+ @pytest_asyncio.fixture
+ async def async_gen_fixture():
+ await asyncio.sleep(0.1)
+ yield "a value"
+
+
+ @pytest_asyncio.fixture(scope="module")
+ async def async_fixture():
+ return await asyncio.sleep(0.1)
+
+All scopes are supported, but if you use a non-function scope you will need
+to redefine the ``event_loop`` fixture to have the same or broader scope.
+Async fixtures need the event loop, and so must have the same or narrower scope
+than the ``event_loop`` fixture.
+
+*auto* and *legacy* modes automatically convert async fixtures declared with the
+standard ``@pytest.fixture`` decorator to *asyncio-driven* versions.
+
+
+Markers
+-------
+
+``pytest.mark.asyncio``
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+Mark your test coroutine with this marker and pytest will execute it as an
+asyncio task using the event loop provided by the ``event_loop`` fixture. See
+the introductory section for an example.
+
+The event loop used can be overridden by overriding the ``event_loop`` fixture
+(see above).
+
+In order to make your test code a little more concise, the pytest |pytestmark|_
+feature can be used to mark entire modules or classes with this marker.
+Only test coroutines will be affected (by default, coroutines prefixed by
+``test_``), so, for example, fixtures are safe to define.
+
+.. code-block:: python
+
+ import asyncio
+
+ import pytest
+
+ # All test coroutines will be treated as marked.
+ pytestmark = pytest.mark.asyncio
+
+
+ async def test_example(event_loop):
+ """No marker!"""
+ await asyncio.sleep(0, loop=event_loop)
+
+In *auto* mode, the ``pytest.mark.asyncio`` marker can be omitted, the marker is added
+automatically to *async* test functions.
+
+
+.. |pytestmark| replace:: ``pytestmark``
+.. _pytestmark: http://doc.pytest.org/en/latest/example/markers.html#marking-whole-classes-or-modules
+
+Note about unittest
+-------------------
+
+Test classes subclassing the standard `unittest <https://docs.python.org/3/library/unittest.html>`__ library are not supported, users
+are recommended to use `unittest.IsolatedAsyncioTestCase <https://docs.python.org/3/library/unittest.html#unittest.IsolatedAsyncioTestCase>`__
+or an async framework such as `asynctest <https://asynctest.readthedocs.io/en/latest>`__.
+
+Contributing
+------------
+Contributions are very welcome. Tests can be run with ``tox``, please ensure
+the coverage at least stays the same before you submit a pull request.
diff --git a/testing/web-platform/tests/tools/third_party/pytest-asyncio/pytest_asyncio.egg-info/SOURCES.txt b/testing/web-platform/tests/tools/third_party/pytest-asyncio/pytest_asyncio.egg-info/SOURCES.txt
new file mode 100644
index 0000000000..a016c1cae0
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest-asyncio/pytest_asyncio.egg-info/SOURCES.txt
@@ -0,0 +1,51 @@
+CHANGELOG.rst
+LICENSE
+MANIFEST.in
+Makefile
+README.rst
+pyproject.toml
+setup.cfg
+tox.ini
+dependencies/default/constraints.txt
+dependencies/default/requirements.txt
+dependencies/pytest-min/constraints.txt
+dependencies/pytest-min/requirements.txt
+pytest_asyncio/__init__.py
+pytest_asyncio/_version.py
+pytest_asyncio/plugin.py
+pytest_asyncio/py.typed
+pytest_asyncio.egg-info/PKG-INFO
+pytest_asyncio.egg-info/SOURCES.txt
+pytest_asyncio.egg-info/dependency_links.txt
+pytest_asyncio.egg-info/entry_points.txt
+pytest_asyncio.egg-info/requires.txt
+pytest_asyncio.egg-info/top_level.txt
+tests/conftest.py
+tests/test_asyncio_fixture.py
+tests/test_dependent_fixtures.py
+tests/test_event_loop_scope.py
+tests/test_flaky_integration.py
+tests/test_simple.py
+tests/test_subprocess.py
+tests/async_fixtures/__init__.py
+tests/async_fixtures/test_async_fixtures.py
+tests/async_fixtures/test_async_fixtures_scope.py
+tests/async_fixtures/test_async_fixtures_with_finalizer.py
+tests/async_fixtures/test_async_gen_fixtures.py
+tests/async_fixtures/test_nested.py
+tests/async_fixtures/test_parametrized_loop.py
+tests/hypothesis/test_base.py
+tests/hypothesis/test_inherited_test.py
+tests/loop_fixture_scope/conftest.py
+tests/loop_fixture_scope/test_loop_fixture_scope.py
+tests/markers/test_class_marker.py
+tests/markers/test_module_marker.py
+tests/modes/test_auto_mode.py
+tests/modes/test_legacy_mode.py
+tests/modes/test_strict_mode.py
+tests/multiloop/conftest.py
+tests/multiloop/test_alternative_loops.py
+tests/respect_event_loop_policy/conftest.py
+tests/respect_event_loop_policy/test_respects_event_loop_policy.py
+tests/trio/test_fixtures.py
+tools/get-version.py \ No newline at end of file
diff --git a/testing/web-platform/tests/tools/third_party/pytest-asyncio/pytest_asyncio.egg-info/dependency_links.txt b/testing/web-platform/tests/tools/third_party/pytest-asyncio/pytest_asyncio.egg-info/dependency_links.txt
new file mode 100644
index 0000000000..8b13789179
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest-asyncio/pytest_asyncio.egg-info/dependency_links.txt
@@ -0,0 +1 @@
+
diff --git a/testing/web-platform/tests/tools/third_party/pytest-asyncio/pytest_asyncio.egg-info/entry_points.txt b/testing/web-platform/tests/tools/third_party/pytest-asyncio/pytest_asyncio.egg-info/entry_points.txt
new file mode 100644
index 0000000000..88db714dad
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest-asyncio/pytest_asyncio.egg-info/entry_points.txt
@@ -0,0 +1,2 @@
+[pytest11]
+asyncio = pytest_asyncio.plugin
diff --git a/testing/web-platform/tests/tools/third_party/pytest-asyncio/pytest_asyncio.egg-info/requires.txt b/testing/web-platform/tests/tools/third_party/pytest-asyncio/pytest_asyncio.egg-info/requires.txt
new file mode 100644
index 0000000000..1e3119384d
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest-asyncio/pytest_asyncio.egg-info/requires.txt
@@ -0,0 +1,11 @@
+pytest>=6.1.0
+
+[:python_version < "3.8"]
+typing-extensions>=3.7.2
+
+[testing]
+coverage>=6.2
+hypothesis>=5.7.1
+flaky>=3.5.0
+mypy>=0.931
+pytest-trio>=0.7.0
diff --git a/testing/web-platform/tests/tools/third_party/pytest-asyncio/pytest_asyncio.egg-info/top_level.txt b/testing/web-platform/tests/tools/third_party/pytest-asyncio/pytest_asyncio.egg-info/top_level.txt
new file mode 100644
index 0000000000..08d05d1ecf
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest-asyncio/pytest_asyncio.egg-info/top_level.txt
@@ -0,0 +1 @@
+pytest_asyncio
diff --git a/testing/web-platform/tests/tools/third_party/pytest-asyncio/pytest_asyncio/__init__.py b/testing/web-platform/tests/tools/third_party/pytest-asyncio/pytest_asyncio/__init__.py
new file mode 100644
index 0000000000..1bc2811d93
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest-asyncio/pytest_asyncio/__init__.py
@@ -0,0 +1,5 @@
+"""The main point for importing pytest-asyncio items."""
+from ._version import version as __version__ # noqa
+from .plugin import fixture
+
+__all__ = ("fixture",)
diff --git a/testing/web-platform/tests/tools/third_party/pytest-asyncio/pytest_asyncio/_version.py b/testing/web-platform/tests/tools/third_party/pytest-asyncio/pytest_asyncio/_version.py
new file mode 100644
index 0000000000..76aa7a209a
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest-asyncio/pytest_asyncio/_version.py
@@ -0,0 +1,5 @@
+# coding: utf-8
+# file generated by setuptools_scm
+# don't change, don't track in version control
+__version__ = version = '0.19.0'
+__version_tuple__ = version_tuple = (0, 19, 0)
diff --git a/testing/web-platform/tests/tools/third_party/pytest-asyncio/pytest_asyncio/plugin.py b/testing/web-platform/tests/tools/third_party/pytest-asyncio/pytest_asyncio/plugin.py
new file mode 100644
index 0000000000..dd6a782b4d
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest-asyncio/pytest_asyncio/plugin.py
@@ -0,0 +1,546 @@
+"""pytest-asyncio implementation."""
+import asyncio
+import contextlib
+import enum
+import functools
+import inspect
+import socket
+import sys
+import warnings
+from typing import (
+ Any,
+ AsyncIterator,
+ Awaitable,
+ Callable,
+ Dict,
+ Iterable,
+ Iterator,
+ List,
+ Optional,
+ Set,
+ TypeVar,
+ Union,
+ cast,
+ overload,
+)
+
+import pytest
+
+if sys.version_info >= (3, 8):
+ from typing import Literal
+else:
+ from typing_extensions import Literal
+
+_R = TypeVar("_R")
+
+_ScopeName = Literal["session", "package", "module", "class", "function"]
+_T = TypeVar("_T")
+
+SimpleFixtureFunction = TypeVar(
+ "SimpleFixtureFunction", bound=Callable[..., Awaitable[_R]]
+)
+FactoryFixtureFunction = TypeVar(
+ "FactoryFixtureFunction", bound=Callable[..., AsyncIterator[_R]]
+)
+FixtureFunction = Union[SimpleFixtureFunction, FactoryFixtureFunction]
+FixtureFunctionMarker = Callable[[FixtureFunction], FixtureFunction]
+
+Config = Any # pytest < 7.0
+PytestPluginManager = Any # pytest < 7.0
+FixtureDef = Any # pytest < 7.0
+Parser = Any # pytest < 7.0
+SubRequest = Any # pytest < 7.0
+
+
class Mode(str, enum.Enum):
    """Plugin operating mode, selected via --asyncio-mode or the
    'asyncio_mode' ini option (see ASYNCIO_MODE_HELP)."""

    AUTO = "auto"
    STRICT = "strict"
    LEGACY = "legacy"
+
+
+LEGACY_MODE = DeprecationWarning(
+ "The 'asyncio_mode' default value will change to 'strict' in future, "
+ "please explicitly use 'asyncio_mode=strict' or 'asyncio_mode=auto' "
+ "in pytest configuration file."
+)
+
+LEGACY_ASYNCIO_FIXTURE = (
+ "'@pytest.fixture' is applied to {name} "
+ "in 'legacy' mode, "
+ "please replace it with '@pytest_asyncio.fixture' as a preparation "
+ "for switching to 'strict' mode (or use 'auto' mode to seamlessly handle "
+ "all these fixtures as asyncio-driven)."
+)
+
+
+ASYNCIO_MODE_HELP = """\
+'auto' - for automatically handling all async functions by the plugin
+'strict' - for autoprocessing disabling (useful if different async frameworks \
+should be tested together, e.g. \
+both pytest-asyncio and pytest-trio are used in the same project)
+'legacy' - for keeping compatibility with pytest-asyncio<0.17: \
+auto-handling is disabled but pytest_asyncio.fixture usage is not enforced
+"""
+
+
def pytest_addoption(parser: Parser, pluginmanager: PytestPluginManager) -> None:
    """Register the ``--asyncio-mode`` CLI option and its ini-file fallback."""
    asyncio_group = parser.getgroup("asyncio")
    asyncio_group.addoption(
        "--asyncio-mode",
        dest="asyncio_mode",
        default=None,
        metavar="MODE",
        help=ASYNCIO_MODE_HELP,
    )
    # Ini option used when the CLI flag is absent (see _get_asyncio_mode).
    parser.addini(
        "asyncio_mode",
        help="default value for --asyncio-mode",
        default="strict",
    )
+
+
@overload
def fixture(
    fixture_function: FixtureFunction,
    *,
    scope: "Union[_ScopeName, Callable[[str, Config], _ScopeName]]" = ...,
    params: Optional[Iterable[object]] = ...,
    autouse: bool = ...,
    ids: Optional[
        Union[
            Iterable[Union[None, str, float, int, bool]],
            Callable[[Any], Optional[object]],
        ]
    ] = ...,
    name: Optional[str] = ...,
) -> FixtureFunction:
    ...


@overload
def fixture(
    fixture_function: None = ...,
    *,
    scope: "Union[_ScopeName, Callable[[str, Config], _ScopeName]]" = ...,
    params: Optional[Iterable[object]] = ...,
    autouse: bool = ...,
    ids: Optional[
        Union[
            Iterable[Union[None, str, float, int, bool]],
            Callable[[Any], Optional[object]],
        ]
    ] = ...,
    name: Optional[str] = None,
) -> FixtureFunctionMarker:
    ...


def fixture(
    fixture_function: Optional[FixtureFunction] = None, **kwargs: Any
) -> Union[FixtureFunction, FixtureFunctionMarker]:
    """Drop-in replacement for :func:`pytest.fixture` that additionally tags
    the function as an asyncio-driven fixture (usable in 'strict' mode).

    Supports both ``@fixture`` (direct decoration) and ``@fixture(...)``
    (decorator factory), mirroring pytest's own fixture decorator.
    """
    if fixture_function is None:
        # Called with arguments: return a decorator that re-enters this
        # function with the actual fixture function.
        @functools.wraps(fixture)
        def marker(fixture_function: FixtureFunction) -> FixtureFunction:
            return fixture(fixture_function, **kwargs)

        return marker

    # Direct decoration: tag the function, then delegate to pytest.
    _set_explicit_asyncio_mark(fixture_function)
    return pytest.fixture(fixture_function, **kwargs)
+
+
+def _has_explicit_asyncio_mark(obj: Any) -> bool:
+ obj = getattr(obj, "__func__", obj) # instance method maybe?
+ return getattr(obj, "_force_asyncio_fixture", False)
+
+
+def _set_explicit_asyncio_mark(obj: Any) -> None:
+ if hasattr(obj, "__func__"):
+ # instance method, check the function object
+ obj = obj.__func__
+ obj._force_asyncio_fixture = True
+
+
+def _is_coroutine(obj: Any) -> bool:
+ """Check to see if an object is really an asyncio coroutine."""
+ return asyncio.iscoroutinefunction(obj)
+
+
def _is_coroutine_or_asyncgen(obj: Any) -> bool:
    """Return True for ``async def`` functions and async generator functions."""
    # Helper inlined: equivalent to _is_coroutine(obj) or isasyncgenfunction.
    if asyncio.iscoroutinefunction(obj):
        return True
    return inspect.isasyncgenfunction(obj)
+
+
def _get_asyncio_mode(config: Config) -> Mode:
    """Resolve the active asyncio mode: the CLI flag wins, the ini option
    ('strict' by default) is the fallback."""
    raw = config.getoption("asyncio_mode")
    if raw is None:
        raw = config.getini("asyncio_mode")
    return Mode(raw)
+
+
def pytest_configure(config: Config) -> None:
    """Register the 'asyncio' marker and warn when legacy mode is implicit."""
    config.addinivalue_line(
        "markers",
        "asyncio: mark the test as a coroutine, it will be "
        "run using an asyncio event loop",
    )
    if _get_asyncio_mode(config) is Mode.LEGACY:
        # Legacy mode is the implicit default here; nudge users to pick one.
        config.issue_config_time_warning(LEGACY_MODE, stacklevel=2)
+
+
@pytest.hookimpl(tryfirst=True)
def pytest_report_header(config: Config) -> List[str]:
    """Add the active asyncio mode to the pytest report header.

    Fixed to use ``@pytest.hookimpl(tryfirst=True)``: applying
    ``@pytest.mark.*`` to hook implementations is deprecated in pytest
    (and this file already uses ``@pytest.hookimpl`` for its other hooks).
    """
    mode = _get_asyncio_mode(config)
    return [f"asyncio: mode={mode}"]
+
+
def _preprocess_async_fixtures(config: Config, holder: Set[FixtureDef]) -> None:
    """Wrap every async fixture known to pytest in a synchronous adapter.

    Depending on the active asyncio mode, coroutine/async-generator fixtures
    are tagged and their functions replaced by sync wrappers that drive the
    event loop.  Processed FixtureDefs are recorded in *holder* so repeated
    calls (one per collected item) skip them.
    """
    asyncio_mode = _get_asyncio_mode(config)
    # "funcmanage" is the name pytest registers its FixtureManager under.
    fixturemanager = config.pluginmanager.get_plugin("funcmanage")
    for fixtures in fixturemanager._arg2fixturedefs.values():
        for fixturedef in fixtures:
            # BUG FIX: was `fixturedef is holder`, which compared a
            # FixtureDef against the holder *set* itself and could never be
            # True, so the dedup bookkeeping never short-circuited.
            if fixturedef in holder:
                continue
            func = fixturedef.func
            if not _is_coroutine_or_asyncgen(func):
                # Nothing to do with a regular fixture function
                continue
            if not _has_explicit_asyncio_mark(func):
                if asyncio_mode == Mode.STRICT:
                    # Ignore async fixtures without explicit asyncio mark in strict mode
                    # This applies to pytest_trio fixtures, for example
                    continue
                elif asyncio_mode == Mode.AUTO:
                    # Enforce asyncio mode if 'auto'
                    _set_explicit_asyncio_mark(func)
                elif asyncio_mode == Mode.LEGACY:
                    _set_explicit_asyncio_mark(func)
                    try:
                        code = func.__code__
                    except AttributeError:
                        # Bound methods expose the code object via __func__.
                        code = func.__func__.__code__
                    name = (
                        f"<fixture {func.__qualname__}, file={code.co_filename}, "
                        f"line={code.co_firstlineno}>"
                    )
                    warnings.warn(
                        LEGACY_ASYNCIO_FIXTURE.format(name=name),
                        DeprecationWarning,
                    )

            # The sync wrappers need "request" and "event_loop"; make pytest
            # resolve them even if the fixture did not request them itself.
            to_add = []
            for name in ("request", "event_loop"):
                if name not in fixturedef.argnames:
                    to_add.append(name)

            if to_add:
                fixturedef.argnames += tuple(to_add)

            if inspect.isasyncgenfunction(func):
                fixturedef.func = _wrap_asyncgen(func)
            elif inspect.iscoroutinefunction(func):
                fixturedef.func = _wrap_async(func)

            assert _has_explicit_asyncio_mark(fixturedef.func)
            holder.add(fixturedef)
+
+
def _add_kwargs(
    func: Callable[..., Any],
    kwargs: Dict[str, Any],
    event_loop: asyncio.AbstractEventLoop,
    request: SubRequest,
) -> Dict[str, Any]:
    """Return a copy of *kwargs* extended with ``request`` / ``event_loop``
    when *func*'s signature accepts parameters of those names."""
    params = inspect.signature(func).parameters
    extended = dict(kwargs)
    if "request" in params:
        extended["request"] = request
    if "event_loop" in params:
        extended["event_loop"] = event_loop
    return extended
+
+
def _wrap_asyncgen(func: Callable[..., AsyncIterator[_R]]) -> Callable[..., _R]:
    """Wrap an async-generator fixture in a synchronous adapter.

    The wrapper advances the generator once on *event_loop* to obtain the
    fixture value and registers a finalizer that resumes the generator for
    teardown, enforcing the single-yield contract.
    """

    @functools.wraps(func)
    def _asyncgen_fixture_wrapper(
        event_loop: asyncio.AbstractEventLoop, request: SubRequest, **kwargs: Any
    ) -> _R:
        gen_obj = func(**_add_kwargs(func, kwargs, event_loop, request))

        async def setup() -> _R:
            res = await gen_obj.__anext__()
            return res

        def finalizer() -> None:
            """Yield again, to finalize."""

            async def async_finalizer() -> None:
                try:
                    await gen_obj.__anext__()
                except StopAsyncIteration:
                    pass
                else:
                    # FIX: add the missing space between the sentences; the
                    # message used to read "...didn't stop.Yield only once."
                    msg = "Async generator fixture didn't stop. "
                    msg += "Yield only once."
                    raise ValueError(msg)

            event_loop.run_until_complete(async_finalizer())

        result = event_loop.run_until_complete(setup())
        request.addfinalizer(finalizer)
        return result

    return _asyncgen_fixture_wrapper
+
+
def _wrap_async(func: Callable[..., Awaitable[_R]]) -> Callable[..., _R]:
    """Wrap a coroutine fixture so pytest can run it synchronously on the
    fixture-provided event loop."""

    @functools.wraps(func)
    def _async_fixture_wrapper(
        event_loop: asyncio.AbstractEventLoop, request: SubRequest, **kwargs: Any
    ) -> _R:
        # _add_kwargs only inspects the signature, so it can run eagerly.
        call_kwargs = _add_kwargs(func, kwargs, event_loop, request)

        async def setup() -> _R:
            return await func(**call_kwargs)

        return event_loop.run_until_complete(setup())

    return _async_fixture_wrapper
+
+
+_HOLDER: Set[FixtureDef] = set()
+
+
@pytest.mark.tryfirst
# NOTE(review): applying @pytest.mark.tryfirst to a hook implementation is
# deprecated in pytest; @pytest.hookimpl(tryfirst=True) is the supported form.
def pytest_pycollect_makeitem(
    collector: Union[pytest.Module, pytest.Class], name: str, obj: object
) -> Union[
    None, pytest.Item, pytest.Collector, List[Union[pytest.Item, pytest.Collector]]
]:
    """A pytest hook to collect asyncio coroutines."""
    if not collector.funcnamefilter(name):
        return None
    # Make sure async fixtures are wrapped before any async test is collected.
    _preprocess_async_fixtures(collector.config, _HOLDER)
    if isinstance(obj, staticmethod):
        # staticmethods need to be unwrapped.
        obj = obj.__func__
    if (
        # Precedence: coroutine OR (hypothesis test AND it wraps a coroutine).
        _is_coroutine(obj)
        or _is_hypothesis_test(obj)
        and _hypothesis_test_wraps_coroutine(obj)
    ):
        item = pytest.Function.from_parent(collector, name=name)
        marker = item.get_closest_marker("asyncio")
        if marker is not None:
            return list(collector._genfunctions(name, obj))
        else:
            if _get_asyncio_mode(item.config) == Mode.AUTO:
                # implicitly add asyncio marker if asyncio mode is on
                ret = list(collector._genfunctions(name, obj))
                for elem in ret:
                    elem.add_marker("asyncio")
                return ret  # type: ignore[return-value]
    return None
+
+
def _hypothesis_test_wraps_coroutine(function: Any) -> bool:
    """Return True when a Hypothesis test's inner test is an ``async def``."""
    inner = function.hypothesis.inner_test
    return _is_coroutine(inner)
+
+
@pytest.hookimpl(trylast=True)
def pytest_fixture_post_finalizer(fixturedef: FixtureDef, request: SubRequest) -> None:
    """Called after fixture teardown"""
    if fixturedef.argname == "event_loop":
        policy = asyncio.get_event_loop_policy()
        try:
            loop = policy.get_event_loop()
        except RuntimeError:
            # No current event loop is set for this thread.
            loop = None
        if loop is not None:
            # Clean up existing loop to avoid ResourceWarnings
            loop.close()
        new_loop = policy.new_event_loop()  # Replace existing event loop
        # Ensure subsequent calls to get_event_loop() succeed
        policy.set_event_loop(new_loop)
+
+
@pytest.hookimpl(hookwrapper=True)
def pytest_fixture_setup(
    fixturedef: FixtureDef, request: SubRequest
) -> Optional[object]:
    """Adjust the event loop policy when an event loop is produced."""
    if fixturedef.argname == "event_loop":
        # Let the fixture run, then install its loop as the policy's current
        # loop so get_event_loop() hands it out during the test.
        outcome = yield
        loop = outcome.get_result()
        policy = asyncio.get_event_loop_policy()
        try:
            # Close the policy's previous loop (if different) before
            # installing the fixture-provided one.
            old_loop = policy.get_event_loop()
            if old_loop is not loop:
                old_loop.close()
        except RuntimeError:
            # Swallow this, since it's probably bad event loop hygiene.
            pass
        policy.set_event_loop(loop)
        return

    yield
+
+
@pytest.hookimpl(tryfirst=True, hookwrapper=True)
def pytest_pyfunc_call(pyfuncitem: pytest.Function) -> Optional[object]:
    """
    Pytest hook called before a test case is run.

    Wraps marked tests in a synchronous function
    where the wrapped test coroutine is executed in an event loop.
    """
    marker = pyfuncitem.get_closest_marker("asyncio")
    if marker is not None:
        funcargs: Dict[str, object] = pyfuncitem.funcargs  # type: ignore[name-defined]
        loop = cast(asyncio.AbstractEventLoop, funcargs["event_loop"])
        if _is_hypothesis_test(pyfuncitem.obj):
            # For Hypothesis tests, wrap the inner test so every generated
            # example runs on the event loop.
            pyfuncitem.obj.hypothesis.inner_test = wrap_in_sync(
                pyfuncitem,
                pyfuncitem.obj.hypothesis.inner_test,
                _loop=loop,
            )
        else:
            pyfuncitem.obj = wrap_in_sync(
                pyfuncitem,
                pyfuncitem.obj,
                _loop=loop,
            )
    yield
+
+
+def _is_hypothesis_test(function: Any) -> bool:
+ return getattr(function, "is_hypothesis_test", False)
+
+
def wrap_in_sync(
    pyfuncitem: pytest.Function,
    func: Callable[..., Awaitable[Any]],
    _loop: asyncio.AbstractEventLoop,
):
    """Return a sync wrapper around an async function executing it in the
    current event loop."""

    # if the function is already wrapped, we rewrap using the original one
    # not using __wrapped__ because the original function may already be
    # a wrapped one
    raw_func = getattr(func, "_raw_test_func", None)
    if raw_func is not None:
        func = raw_func

    @functools.wraps(func)
    def inner(*args, **kwargs):
        coro = func(*args, **kwargs)
        if not inspect.isawaitable(coro):
            # A sync function ended up with the asyncio marker (e.g. via a
            # module-level pytestmark): warn and skip running, instead of
            # failing with a confusing error on run_until_complete.
            pyfuncitem.warn(
                pytest.PytestWarning(
                    f"The test {pyfuncitem} is marked with '@pytest.mark.asyncio' "
                    "but it is not an async function. "
                    "Please remove asyncio marker. "
                    "If the test is not marked explicitly, "
                    "check for global markers applied via 'pytestmark'."
                )
            )
            return
        task = asyncio.ensure_future(coro, loop=_loop)
        try:
            _loop.run_until_complete(task)
        except BaseException:
            # run_until_complete doesn't get the result from exceptions
            # that are not subclasses of `Exception`. Consume all
            # exceptions to prevent asyncio's warning from logging.
            if task.done() and not task.cancelled():
                task.exception()
            raise

    # Remember the original so a later re-wrap (see _raw_test_func lookup
    # above) starts from the unwrapped coroutine function.
    inner._raw_test_func = func  # type: ignore[attr-defined]
    return inner
+
+
def pytest_runtest_setup(item: pytest.Item) -> None:
    """For asyncio-marked items, force 'event_loop' to be the first fixture
    resolved, and reject Hypothesis versions the plugin cannot support."""
    marker = item.get_closest_marker("asyncio")
    if marker is None:
        return
    fixturenames = item.fixturenames  # type: ignore[attr-defined]
    # inject an event loop fixture for all async tests
    if "event_loop" in fixturenames:
        fixturenames.remove("event_loop")
    fixturenames.insert(0, "event_loop")
    obj = getattr(item, "obj", None)
    # Hypothesis >= 3.64 exposes a `.hypothesis` attribute on the test
    # function; older versions only set `is_hypothesis_test`.
    if not getattr(obj, "hypothesis", False) and getattr(
        obj, "is_hypothesis_test", False
    ):
        pytest.fail(
            "test function `%r` is using Hypothesis, but pytest-asyncio "
            "only works with Hypothesis 3.64.0 or later." % item
        )
+
+
@pytest.fixture
def event_loop(request: "pytest.FixtureRequest") -> Iterator[asyncio.AbstractEventLoop]:
    """Create an instance of the default event loop for each test case."""
    policy = asyncio.get_event_loop_policy()
    loop = policy.new_event_loop()
    yield loop
    loop.close()
+
+
+def _unused_port(socket_type: int) -> int:
+ """Find an unused localhost port from 1024-65535 and return it."""
+ with contextlib.closing(socket.socket(type=socket_type)) as sock:
+ sock.bind(("127.0.0.1", 0))
+ return sock.getsockname()[1]
+
+
@pytest.fixture
def unused_tcp_port() -> int:
    """Return an unused localhost TCP port."""
    return _unused_port(socket.SOCK_STREAM)
+
+
@pytest.fixture
def unused_udp_port() -> int:
    """Return an unused localhost UDP port."""
    return _unused_port(socket.SOCK_DGRAM)
+
+
@pytest.fixture(scope="session")
def unused_tcp_port_factory() -> Callable[[], int]:
    """A factory function, producing different unused TCP ports."""
    handed_out = set()

    def factory():
        """Return an unused port."""
        # Retry until the OS hands us a port this session hasn't seen yet.
        while True:
            candidate = _unused_port(socket.SOCK_STREAM)
            if candidate not in handed_out:
                handed_out.add(candidate)
                return candidate

    return factory
+
+
@pytest.fixture(scope="session")
def unused_udp_port_factory() -> Callable[[], int]:
    """A factory function, producing different unused UDP ports."""
    handed_out = set()

    def factory():
        """Return an unused port."""
        # Retry until the OS hands us a port this session hasn't seen yet.
        while True:
            candidate = _unused_port(socket.SOCK_DGRAM)
            if candidate not in handed_out:
                handed_out.add(candidate)
                return candidate

    return factory
diff --git a/testing/web-platform/tests/tools/third_party/pytest-asyncio/pytest_asyncio/py.typed b/testing/web-platform/tests/tools/third_party/pytest-asyncio/pytest_asyncio/py.typed
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest-asyncio/pytest_asyncio/py.typed
diff --git a/testing/web-platform/tests/tools/third_party/pytest-asyncio/setup.cfg b/testing/web-platform/tests/tools/third_party/pytest-asyncio/setup.cfg
new file mode 100644
index 0000000000..85e3fdb323
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest-asyncio/setup.cfg
@@ -0,0 +1,73 @@
+[metadata]
+name = pytest-asyncio
+version = attr: pytest_asyncio.__version__
+url = https://github.com/pytest-dev/pytest-asyncio
+project_urls =
+ GitHub = https://github.com/pytest-dev/pytest-asyncio
+description = Pytest support for asyncio
+long_description = file: README.rst
+long_description_content_type = text/x-rst
+author = Tin Tvrtković <tinchester@gmail.com>
+author_email = tinchester@gmail.com
+license = Apache 2.0
+license_file = LICENSE
+classifiers =
+ Development Status :: 4 - Beta
+
+ Intended Audience :: Developers
+
+ License :: OSI Approved :: Apache Software License
+
+ Programming Language :: Python :: 3.7
+ Programming Language :: Python :: 3.8
+ Programming Language :: Python :: 3.9
+ Programming Language :: Python :: 3.10
+ Programming Language :: Python :: 3.11
+
+ Topic :: Software Development :: Testing
+
+ Framework :: AsyncIO
+ Framework :: Pytest
+ Typing :: Typed
+
+[options]
+python_requires = >=3.7
+packages = find:
+include_package_data = True
+install_requires =
+ pytest >= 6.1.0
+ typing-extensions >= 3.7.2; python_version < "3.8"
+
+[options.extras_require]
+testing =
+ coverage >= 6.2
+ hypothesis >= 5.7.1
+ flaky >= 3.5.0
+ mypy >= 0.931
+ pytest-trio >= 0.7.0
+
+[options.entry_points]
+pytest11 =
+ asyncio = pytest_asyncio.plugin
+
+[coverage:run]
+source = pytest_asyncio
+branch = true
+
+[coverage:report]
+show_missing = true
+
+[tool:pytest]
+addopts = -rsx --tb=short
+testpaths = tests
+asyncio_mode = auto
+junit_family = xunit2
+filterwarnings = error
+
+[flake8]
+max-line-length = 88
+
+[egg_info]
+tag_build =
+tag_date = 0
+
diff --git a/testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/async_fixtures/__init__.py b/testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/async_fixtures/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/async_fixtures/__init__.py
diff --git a/testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/async_fixtures/test_async_fixtures.py b/testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/async_fixtures/test_async_fixtures.py
new file mode 100644
index 0000000000..7ddd04ab86
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/async_fixtures/test_async_fixtures.py
@@ -0,0 +1,25 @@
# Tests that a plain coroutine fixture resolves to its awaited value.
import asyncio
import unittest.mock

import pytest

# Sentinels tracking fixture setup order and the fixture's resolved value.
START = object()
END = object()
RETVAL = object()


@pytest.fixture
def mock():
    # Records calls and always returns RETVAL.
    return unittest.mock.Mock(return_value=RETVAL)


@pytest.fixture
async def async_fixture(mock):
    # Calls mock(START) and resolves to RETVAL via asyncio.sleep's `result`.
    return await asyncio.sleep(0.1, result=mock(START))


@pytest.mark.asyncio
async def test_async_fixture(async_fixture, mock):
    # The fixture must have run exactly once, with START, before the test.
    assert mock.call_count == 1
    assert mock.call_args_list[-1] == unittest.mock.call(START)
    assert async_fixture is RETVAL
diff --git a/testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/async_fixtures/test_async_fixtures_scope.py b/testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/async_fixtures/test_async_fixtures_scope.py
new file mode 100644
index 0000000000..b150f8a8e5
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/async_fixtures/test_async_fixtures_scope.py
@@ -0,0 +1,25 @@
+"""
+We support module-scoped async fixtures, but only if the event loop is
+module-scoped too.
+"""
+import asyncio
+
+import pytest
+
+
+@pytest.fixture(scope="module")
+def event_loop():
+ """A module-scoped event loop."""
+ return asyncio.new_event_loop()
+
+
+@pytest.fixture(scope="module")
+async def async_fixture():
+ await asyncio.sleep(0.1)
+ return 1
+
+
+@pytest.mark.asyncio
+async def test_async_fixture_scope(async_fixture):
+ assert async_fixture == 1
+ await asyncio.sleep(0.1)
diff --git a/testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/async_fixtures/test_async_fixtures_with_finalizer.py b/testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/async_fixtures/test_async_fixtures_with_finalizer.py
new file mode 100644
index 0000000000..2e72d5de04
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/async_fixtures/test_async_fixtures_with_finalizer.py
@@ -0,0 +1,59 @@
# Verifies that finalizers of module-scoped async fixtures can await tasks
# on the correct event loop (fixture-provided or policy-provided).
import asyncio
import functools

import pytest


@pytest.mark.asyncio
async def test_module_with_event_loop_finalizer(port_with_event_loop_finalizer):
    await asyncio.sleep(0.01)
    assert port_with_event_loop_finalizer


@pytest.mark.asyncio
async def test_module_with_get_event_loop_finalizer(port_with_get_event_loop_finalizer):
    await asyncio.sleep(0.01)
    assert port_with_get_event_loop_finalizer


@pytest.fixture(scope="module")
def event_loop():
    """Change event_loop fixture to module level."""
    policy = asyncio.get_event_loop_policy()
    loop = policy.new_event_loop()
    yield loop
    loop.close()


@pytest.fixture(scope="module")
async def port_with_event_loop_finalizer(request, event_loop):
    # Finalizer awaits the pending task on the fixture-provided loop.
    def port_finalizer(finalizer):
        async def port_afinalizer():
            # await task using loop provided by event_loop fixture
            # RuntimeError is raised if task is created on a different loop
            await finalizer

        event_loop.run_until_complete(port_afinalizer())

    worker = asyncio.ensure_future(asyncio.sleep(0.2))
    request.addfinalizer(functools.partial(port_finalizer, worker))
    return True


@pytest.fixture(scope="module")
async def port_with_get_event_loop_finalizer(request, event_loop):
    # Finalizer awaits the pending task on the policy's current loop.
    def port_finalizer(finalizer):
        async def port_afinalizer():
            # await task using current loop retrieved from the event loop policy
            # RuntimeError is raised if task is created on a different loop.
            # This can happen when pytest_fixture_setup
            # does not set up the loop correctly,
            # for example when policy.set_event_loop() is called with a wrong argument
            await finalizer

        current_loop = asyncio.get_event_loop_policy().get_event_loop()
        current_loop.run_until_complete(port_afinalizer())

    worker = asyncio.ensure_future(asyncio.sleep(0.2))
    request.addfinalizer(functools.partial(port_finalizer, worker))
    return True
diff --git a/testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/async_fixtures/test_async_gen_fixtures.py b/testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/async_fixtures/test_async_gen_fixtures.py
new file mode 100644
index 0000000000..0bea745868
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/async_fixtures/test_async_gen_fixtures.py
@@ -0,0 +1,38 @@
# Verifies setup/teardown ordering of async-generator fixtures.
import unittest.mock

import pytest

# Sentinels: value yielded at setup (START), normal completion (END), and
# the mock's return value (RETVAL).
START = object()
END = object()
RETVAL = object()


@pytest.fixture(scope="module")
def mock():
    # Module-scoped so the second test can observe the first test's teardown.
    return unittest.mock.Mock(return_value=RETVAL)


@pytest.fixture
async def async_gen_fixture(mock):
    # Records START on setup, then END (or the thrown exception) on teardown.
    try:
        yield mock(START)
    except Exception as e:
        mock(e)
    else:
        mock(END)


@pytest.mark.asyncio
async def test_async_gen_fixture(async_gen_fixture, mock):
    assert mock.called
    assert mock.call_args_list[-1] == unittest.mock.call(START)
    assert async_gen_fixture is RETVAL


@pytest.mark.asyncio
async def test_async_gen_fixture_finalized(mock):
    # Relies on running after test_async_gen_fixture (same module-scoped
    # mock): the generator fixture must have been finalized with END.
    try:
        assert mock.called
        assert mock.call_args_list[-1] == unittest.mock.call(END)
    finally:
        mock.reset_mock()
diff --git a/testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/async_fixtures/test_nested.py b/testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/async_fixtures/test_nested.py
new file mode 100644
index 0000000000..e81e782452
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/async_fixtures/test_nested.py
@@ -0,0 +1,26 @@
# Verifies that async fixtures can depend on other async fixtures.
import asyncio

import pytest


@pytest.fixture()
async def async_inner_fixture():
    await asyncio.sleep(0.01)
    print("inner start")
    yield True
    print("inner stop")


@pytest.fixture()
async def async_fixture_outer(async_inner_fixture, event_loop):
    # Depends on the inner async fixture; both must share the same loop.
    await asyncio.sleep(0.01)
    print("outer start")
    assert async_inner_fixture is True
    yield True
    print("outer stop")


@pytest.mark.asyncio
async def test_async_fixture(async_fixture_outer):
    assert async_fixture_outer is True
    print("test_async_fixture")
diff --git a/testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/async_fixtures/test_parametrized_loop.py b/testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/async_fixtures/test_parametrized_loop.py
new file mode 100644
index 0000000000..2fb8befa7f
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/async_fixtures/test_parametrized_loop.py
@@ -0,0 +1,31 @@
# Verifies that a parametrized event_loop fixture multiplies test runs.
import asyncio

import pytest

# Number of test invocations observed; checked in teardown_module.
TESTS_COUNT = 0


def teardown_module():
    # parametrized 2 * 2 times: 2 for 'event_loop' and 2 for 'fix'
    assert TESTS_COUNT == 4


@pytest.fixture(scope="module", params=[1, 2])
def event_loop(request):
    # Touch the param so each parametrization produces a distinct loop.
    request.param
    loop = asyncio.new_event_loop()
    yield loop
    loop.close()


@pytest.fixture(params=["a", "b"])
async def fix(request):
    await asyncio.sleep(0)
    return request.param


@pytest.mark.asyncio
async def test_parametrized_loop(fix):
    await asyncio.sleep(0)
    global TESTS_COUNT
    TESTS_COUNT += 1
diff --git a/testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/conftest.py b/testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/conftest.py
new file mode 100644
index 0000000000..4aa8c89aa7
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/conftest.py
@@ -0,0 +1,32 @@
# Shared fixtures for the pytest-asyncio test suite.
import asyncio

import pytest

# Expose pytest's pytester/testdir fixtures to the suite.
pytest_plugins = "pytester"


@pytest.fixture
def dependent_fixture(event_loop):
    """A fixture dependent on the event_loop fixture, doing some cleanup."""
    counter = 0

    async def just_a_sleep():
        """Just sleep a little while."""
        # NOTE(review): `nonlocal event_loop` is unnecessary — the name is
        # only read from the enclosing scope, never rebound.
        nonlocal event_loop
        await asyncio.sleep(0.1)
        nonlocal counter
        counter += 1

    event_loop.run_until_complete(just_a_sleep())
    yield
    event_loop.run_until_complete(just_a_sleep())

    # Both the setup and the teardown sleep must have completed.
    assert counter == 2


@pytest.fixture(scope="session", name="factory_involving_factories")
def factory_involving_factories_fixture(unused_tcp_port_factory):
    # Session-scoped factory that simply delegates to the port factory.
    def factory():
        return unused_tcp_port_factory()

    return factory
diff --git a/testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/hypothesis/test_base.py b/testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/hypothesis/test_base.py
new file mode 100644
index 0000000000..e6da342732
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/hypothesis/test_base.py
@@ -0,0 +1,88 @@
+"""Tests for the Hypothesis integration, which wraps async functions in a
+sync shim for Hypothesis.
+"""
+import asyncio
+from textwrap import dedent
+
+import pytest
+from hypothesis import given, strategies as st
+
+
+@pytest.fixture(scope="module")
+def event_loop():
+ loop = asyncio.get_event_loop_policy().new_event_loop()
+ yield loop
+ loop.close()
+
+
+@given(st.integers())
+@pytest.mark.asyncio
+async def test_mark_inner(n):
+ assert isinstance(n, int)
+
+
+@pytest.mark.asyncio
+@given(st.integers())
+async def test_mark_outer(n):
+ assert isinstance(n, int)
+
+
+@pytest.mark.parametrize("y", [1, 2])
+@given(x=st.none())
+@pytest.mark.asyncio
+async def test_mark_and_parametrize(x, y):
+ assert x is None
+ assert y in (1, 2)
+
+
+@given(st.integers())
+@pytest.mark.asyncio
+async def test_can_use_fixture_provided_event_loop(event_loop, n):
+ semaphore = asyncio.Semaphore(value=0)
+ event_loop.call_soon(semaphore.release)
+ await semaphore.acquire()
+
+
+def test_async_auto_marked(testdir):
+ testdir.makepyfile(
+ dedent(
+ """\
+ import asyncio
+ import pytest
+ from hypothesis import given
+ import hypothesis.strategies as st
+
+ pytest_plugins = 'pytest_asyncio'
+
+ @given(n=st.integers())
+ async def test_hypothesis(n: int):
+ assert isinstance(n, int)
+ """
+ )
+ )
+ result = testdir.runpytest("--asyncio-mode=auto")
+ result.assert_outcomes(passed=1)
+
+
+def test_sync_not_auto_marked(testdir):
+ """Assert that synchronous Hypothesis functions are not marked with asyncio"""
+ testdir.makepyfile(
+ dedent(
+ """\
+ import asyncio
+ import pytest
+ from hypothesis import given
+ import hypothesis.strategies as st
+
+ pytest_plugins = 'pytest_asyncio'
+
+ @given(n=st.integers())
+ def test_hypothesis(request, n: int):
+ markers = [marker.name for marker in request.node.own_markers]
+ assert "asyncio" not in markers
+ assert isinstance(n, int)
+ """
+ )
+ )
+ result = testdir.runpytest("--asyncio-mode=auto")
+ result.assert_outcomes(passed=1)
diff --git a/testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/hypothesis/test_inherited_test.py b/testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/hypothesis/test_inherited_test.py
new file mode 100644
index 0000000000..a77622648f
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/hypothesis/test_inherited_test.py
@@ -0,0 +1,20 @@
+import hypothesis.strategies as st
+import pytest
+from hypothesis import given
+
+
+class BaseClass:
+ @pytest.mark.asyncio
+ @given(value=st.integers())
+ async def test_hypothesis(self, value: int) -> None:
+ pass
+
+
+class TestOne(BaseClass):
+ """During the first execution the Hypothesis test
+ is wrapped in a synchronous function."""
+
+
+class TestTwo(BaseClass):
+ """Execute the test a second time to ensure that
+ the test receives a fresh event loop."""
diff --git a/testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/loop_fixture_scope/conftest.py b/testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/loop_fixture_scope/conftest.py
new file mode 100644
index 0000000000..223160c2b6
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/loop_fixture_scope/conftest.py
@@ -0,0 +1,17 @@
+import asyncio
+
+import pytest
+
+
+class CustomSelectorLoop(asyncio.SelectorEventLoop):
+ """A subclass with no overrides, just to test for presence."""
+
+
+loop = CustomSelectorLoop()
+
+
+@pytest.fixture(scope="module")
+def event_loop():
+ """Create an instance of the default event loop for each test case."""
+ yield loop
+ loop.close()
diff --git a/testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/loop_fixture_scope/test_loop_fixture_scope.py b/testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/loop_fixture_scope/test_loop_fixture_scope.py
new file mode 100644
index 0000000000..679ab48f09
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/loop_fixture_scope/test_loop_fixture_scope.py
@@ -0,0 +1,16 @@
+"""Unit tests for overriding the event loop with a larger scoped one."""
+import asyncio
+
+import pytest
+
+
+@pytest.mark.asyncio
+async def test_for_custom_loop():
+ """This test should be executed using the custom loop."""
+ await asyncio.sleep(0.01)
+ assert type(asyncio.get_event_loop()).__name__ == "CustomSelectorLoop"
+
+
+@pytest.mark.asyncio
+async def test_dependent_fixture(dependent_fixture):
+ await asyncio.sleep(0.1)
diff --git a/testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/markers/test_class_marker.py b/testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/markers/test_class_marker.py
new file mode 100644
index 0000000000..d46c3af74f
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/markers/test_class_marker.py
@@ -0,0 +1,25 @@
+"""Test if pytestmark works when defined on a class."""
+import asyncio
+
+import pytest
+
+
+class TestPyTestMark:
+ pytestmark = pytest.mark.asyncio
+
+ async def test_is_asyncio(self, event_loop, sample_fixture):
+ assert asyncio.get_event_loop()
+ counter = 1
+
+ async def inc():
+ nonlocal counter
+ counter += 1
+ await asyncio.sleep(0)
+
+ await asyncio.ensure_future(inc())
+ assert counter == 2
+
+
+@pytest.fixture
+def sample_fixture():
+ return None
diff --git a/testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/markers/test_module_marker.py b/testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/markers/test_module_marker.py
new file mode 100644
index 0000000000..2f69dbc933
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/markers/test_module_marker.py
@@ -0,0 +1,39 @@
+"""Test if pytestmark works when defined in a module."""
+import asyncio
+
+import pytest
+
+pytestmark = pytest.mark.asyncio
+
+
+class TestPyTestMark:
+ async def test_is_asyncio(self, event_loop, sample_fixture):
+ assert asyncio.get_event_loop()
+
+ counter = 1
+
+ async def inc():
+ nonlocal counter
+ counter += 1
+ await asyncio.sleep(0)
+
+ await asyncio.ensure_future(inc())
+ assert counter == 2
+
+
+async def test_is_asyncio(event_loop, sample_fixture):
+ assert asyncio.get_event_loop()
+ counter = 1
+
+ async def inc():
+ nonlocal counter
+ counter += 1
+ await asyncio.sleep(0)
+
+ await asyncio.ensure_future(inc())
+ assert counter == 2
+
+
+@pytest.fixture
+def sample_fixture():
+ return None
diff --git a/testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/modes/test_auto_mode.py b/testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/modes/test_auto_mode.py
new file mode 100644
index 0000000000..fc4d2df076
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/modes/test_auto_mode.py
@@ -0,0 +1,139 @@
+from textwrap import dedent
+
+
+def test_auto_mode_cmdline(testdir):
+ testdir.makepyfile(
+ dedent(
+ """\
+ import asyncio
+ import pytest
+
+ pytest_plugins = 'pytest_asyncio'
+
+ async def test_a():
+ await asyncio.sleep(0)
+ """
+ )
+ )
+ result = testdir.runpytest("--asyncio-mode=auto")
+ result.assert_outcomes(passed=1)
+
+
+def test_auto_mode_cfg(testdir):
+ testdir.makepyfile(
+ dedent(
+ """\
+ import asyncio
+ import pytest
+
+ pytest_plugins = 'pytest_asyncio'
+
+ async def test_a():
+ await asyncio.sleep(0)
+ """
+ )
+ )
+ testdir.makefile(".ini", pytest="[pytest]\nasyncio_mode = auto\n")
+ result = testdir.runpytest()
+ result.assert_outcomes(passed=1)
+
+
+def test_auto_mode_async_fixture(testdir):
+ testdir.makepyfile(
+ dedent(
+ """\
+ import asyncio
+ import pytest
+
+ pytest_plugins = 'pytest_asyncio'
+
+ @pytest.fixture
+ async def fixture_a():
+ await asyncio.sleep(0)
+ return 1
+
+ async def test_a(fixture_a):
+ await asyncio.sleep(0)
+ assert fixture_a == 1
+ """
+ )
+ )
+ result = testdir.runpytest("--asyncio-mode=auto")
+ result.assert_outcomes(passed=1)
+
+
+def test_auto_mode_method_fixture(testdir):
+ testdir.makepyfile(
+ dedent(
+ """\
+ import asyncio
+ import pytest
+
+ pytest_plugins = 'pytest_asyncio'
+
+
+ class TestA:
+
+ @pytest.fixture
+ async def fixture_a(self):
+ await asyncio.sleep(0)
+ return 1
+
+ async def test_a(self, fixture_a):
+ await asyncio.sleep(0)
+ assert fixture_a == 1
+ """
+ )
+ )
+ result = testdir.runpytest("--asyncio-mode=auto")
+ result.assert_outcomes(passed=1)
+
+
+def test_auto_mode_static_method(testdir):
+ testdir.makepyfile(
+ dedent(
+ """\
+ import asyncio
+
+ pytest_plugins = 'pytest_asyncio'
+
+
+ class TestA:
+
+ @staticmethod
+ async def test_a():
+ await asyncio.sleep(0)
+ """
+ )
+ )
+ result = testdir.runpytest("--asyncio-mode=auto")
+ result.assert_outcomes(passed=1)
+
+
+def test_auto_mode_static_method_fixture(testdir):
+ testdir.makepyfile(
+ dedent(
+ """\
+ import asyncio
+ import pytest
+
+ pytest_plugins = 'pytest_asyncio'
+
+
+ class TestA:
+
+ @staticmethod
+ @pytest.fixture
+ async def fixture_a():
+ await asyncio.sleep(0)
+ return 1
+
+ @staticmethod
+ async def test_a(fixture_a):
+ await asyncio.sleep(0)
+ assert fixture_a == 1
+ """
+ )
+ )
+ result = testdir.runpytest("--asyncio-mode=auto")
+ result.assert_outcomes(passed=1)
diff --git a/testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/modes/test_legacy_mode.py b/testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/modes/test_legacy_mode.py
new file mode 100644
index 0000000000..12d4afe18d
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/modes/test_legacy_mode.py
@@ -0,0 +1,112 @@
+from textwrap import dedent
+
+LEGACY_MODE = (
+ "The 'asyncio_mode' default value will change to 'strict' in future, "
+ "please explicitly use 'asyncio_mode=strict' or 'asyncio_mode=auto' "
+ "in pytest configuration file."
+)
+
+LEGACY_ASYNCIO_FIXTURE = (
+ "'@pytest.fixture' is applied to {name} "
+ "in 'legacy' mode, "
+ "please replace it with '@pytest_asyncio.fixture' as a preparation "
+ "for switching to 'strict' mode (or use 'auto' mode to seamlessly handle "
+ "all these fixtures as asyncio-driven)."
+).format(name="*")
+
+
+def test_warning_for_legacy_mode_cmdline(testdir):
+ testdir.makepyfile(
+ dedent(
+ """\
+ import asyncio
+ import pytest
+
+ pytest_plugins = 'pytest_asyncio'
+
+ @pytest.mark.asyncio
+ async def test_a():
+ await asyncio.sleep(0)
+ """
+ )
+ )
+ result = testdir.runpytest("--asyncio-mode=legacy")
+ assert result.parseoutcomes()["warnings"] == 1
+ result.stdout.fnmatch_lines(["*" + LEGACY_MODE + "*"])
+
+
+def test_warning_for_legacy_mode_cfg(testdir):
+ testdir.makepyfile(
+ dedent(
+ """\
+ import asyncio
+ import pytest
+
+ pytest_plugins = 'pytest_asyncio'
+
+ @pytest.mark.asyncio
+ async def test_a():
+ await asyncio.sleep(0)
+ """
+ )
+ )
+ testdir.makefile(".ini", pytest="[pytest]\nasyncio_mode = legacy\n")
+ result = testdir.runpytest()
+ assert result.parseoutcomes()["warnings"] == 1
+ result.stdout.fnmatch_lines(["*" + LEGACY_MODE + "*"])
+ result.stdout.no_fnmatch_line("*" + LEGACY_ASYNCIO_FIXTURE + "*")
+
+
+def test_warning_for_legacy_fixture(testdir):
+ testdir.makepyfile(
+ dedent(
+ """\
+ import asyncio
+ import pytest
+
+ pytest_plugins = 'pytest_asyncio'
+
+ @pytest.fixture
+ async def fixture_a():
+ await asyncio.sleep(0)
+ return 1
+
+ @pytest.mark.asyncio
+ async def test_a(fixture_a):
+ await asyncio.sleep(0)
+ assert fixture_a == 1
+ """
+ )
+ )
+ result = testdir.runpytest("--asyncio-mode=legacy")
+ assert result.parseoutcomes()["warnings"] == 2
+ result.stdout.fnmatch_lines(["*" + LEGACY_ASYNCIO_FIXTURE + "*"])
+
+
+def test_warning_for_legacy_method_fixture(testdir):
+ testdir.makepyfile(
+ dedent(
+ """\
+ import asyncio
+ import pytest
+
+ pytest_plugins = 'pytest_asyncio'
+
+
+ class TestA:
+
+ @pytest.fixture
+ async def fixture_a(self):
+ await asyncio.sleep(0)
+ return 1
+
+ @pytest.mark.asyncio
+ async def test_a(self, fixture_a):
+ await asyncio.sleep(0)
+ assert fixture_a == 1
+ """
+ )
+ )
+ result = testdir.runpytest("--asyncio-mode=legacy")
+ assert result.parseoutcomes()["warnings"] == 2
+ result.stdout.fnmatch_lines(["*" + LEGACY_ASYNCIO_FIXTURE + "*"])
diff --git a/testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/modes/test_strict_mode.py b/testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/modes/test_strict_mode.py
new file mode 100644
index 0000000000..3b6487c72b
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/modes/test_strict_mode.py
@@ -0,0 +1,68 @@
+from textwrap import dedent
+
+
+def test_strict_mode_cmdline(testdir):
+ testdir.makepyfile(
+ dedent(
+ """\
+ import asyncio
+ import pytest
+
+ pytest_plugins = 'pytest_asyncio'
+
+ @pytest.mark.asyncio
+ async def test_a():
+ await asyncio.sleep(0)
+ """
+ )
+ )
+ result = testdir.runpytest("--asyncio-mode=strict")
+ result.assert_outcomes(passed=1)
+
+
+def test_strict_mode_cfg(testdir):
+ testdir.makepyfile(
+ dedent(
+ """\
+ import asyncio
+ import pytest
+
+ pytest_plugins = 'pytest_asyncio'
+
+ @pytest.mark.asyncio
+ async def test_a():
+ await asyncio.sleep(0)
+ """
+ )
+ )
+ testdir.makefile(".ini", pytest="[pytest]\nasyncio_mode = strict\n")
+ result = testdir.runpytest()
+ result.assert_outcomes(passed=1)
+
+
+def test_strict_mode_method_fixture(testdir):
+ testdir.makepyfile(
+ dedent(
+ """\
+ import asyncio
+ import pytest
+ import pytest_asyncio
+
+ pytest_plugins = 'pytest_asyncio'
+
+ class TestA:
+
+ @pytest_asyncio.fixture
+ async def fixture_a(self):
+ await asyncio.sleep(0)
+ return 1
+
+ @pytest.mark.asyncio
+ async def test_a(self, fixture_a):
+ await asyncio.sleep(0)
+ assert fixture_a == 1
+ """
+ )
+ )
+ result = testdir.runpytest("--asyncio-mode=auto")
+ result.assert_outcomes(passed=1)
diff --git a/testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/multiloop/conftest.py b/testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/multiloop/conftest.py
new file mode 100644
index 0000000000..ebcb627a6d
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/multiloop/conftest.py
@@ -0,0 +1,15 @@
+import asyncio
+
+import pytest
+
+
+class CustomSelectorLoop(asyncio.SelectorEventLoop):
+ """A subclass with no overrides, just to test for presence."""
+
+
+@pytest.fixture
+def event_loop():
+ """Create an instance of the default event loop for each test case."""
+ loop = CustomSelectorLoop()
+ yield loop
+ loop.close()
diff --git a/testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/multiloop/test_alternative_loops.py b/testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/multiloop/test_alternative_loops.py
new file mode 100644
index 0000000000..5f66c96795
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/multiloop/test_alternative_loops.py
@@ -0,0 +1,16 @@
+"""Unit tests for overriding the event loop."""
+import asyncio
+
+import pytest
+
+
+@pytest.mark.asyncio
+async def test_for_custom_loop():
+ """This test should be executed using the custom loop."""
+ await asyncio.sleep(0.01)
+ assert type(asyncio.get_event_loop()).__name__ == "CustomSelectorLoop"
+
+
+@pytest.mark.asyncio
+async def test_dependent_fixture(dependent_fixture):
+ await asyncio.sleep(0.1)
diff --git a/testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/respect_event_loop_policy/conftest.py b/testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/respect_event_loop_policy/conftest.py
new file mode 100644
index 0000000000..2c5cef24ff
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/respect_event_loop_policy/conftest.py
@@ -0,0 +1,16 @@
+"""Defines and sets a custom event loop policy"""
+import asyncio
+from asyncio import DefaultEventLoopPolicy, SelectorEventLoop
+
+
+class TestEventLoop(SelectorEventLoop):
+ pass
+
+
+class TestEventLoopPolicy(DefaultEventLoopPolicy):
+ def new_event_loop(self):
+ return TestEventLoop()
+
+
+# This statement represents code that sets a custom event loop policy
+asyncio.set_event_loop_policy(TestEventLoopPolicy())
diff --git a/testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/respect_event_loop_policy/test_respects_event_loop_policy.py b/testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/respect_event_loop_policy/test_respects_event_loop_policy.py
new file mode 100644
index 0000000000..610b33889e
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/respect_event_loop_policy/test_respects_event_loop_policy.py
@@ -0,0 +1,17 @@
+"""Tests that any externally provided event loop policy remains unaltered."""
+import asyncio
+
+import pytest
+
+
+@pytest.mark.asyncio
+async def test_uses_loop_provided_by_custom_policy():
+ """Asserts that test cases use the event loop
+ provided by the custom event loop policy"""
+ assert type(asyncio.get_event_loop()).__name__ == "TestEventLoop"
+
+
+@pytest.mark.asyncio
+async def test_custom_policy_is_not_overwritten():
+ """Asserts that any custom event loop policy stays the same across test cases"""
+ assert type(asyncio.get_event_loop()).__name__ == "TestEventLoop"
diff --git a/testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/test_asyncio_fixture.py b/testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/test_asyncio_fixture.py
new file mode 100644
index 0000000000..3a28cebb63
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/test_asyncio_fixture.py
@@ -0,0 +1,64 @@
+import asyncio
+from textwrap import dedent
+
+import pytest
+
+import pytest_asyncio
+
+
+@pytest_asyncio.fixture
+async def fixture_bare():
+ await asyncio.sleep(0)
+ return 1
+
+
+@pytest.mark.asyncio
+async def test_bare_fixture(fixture_bare):
+ await asyncio.sleep(0)
+ assert fixture_bare == 1
+
+
+@pytest_asyncio.fixture(name="new_fixture_name")
+async def fixture_with_name(request):
+ await asyncio.sleep(0)
+ return request.fixturename
+
+
+@pytest.mark.asyncio
+async def test_fixture_with_name(new_fixture_name):
+ await asyncio.sleep(0)
+ assert new_fixture_name == "new_fixture_name"
+
+
+@pytest_asyncio.fixture(params=[2, 4])
+async def fixture_with_params(request):
+ await asyncio.sleep(0)
+ return request.param
+
+
+@pytest.mark.asyncio
+async def test_fixture_with_params(fixture_with_params):
+ await asyncio.sleep(0)
+ assert fixture_with_params % 2 == 0
+
+
+@pytest.mark.parametrize("mode", ("auto", "strict", "legacy"))
+def test_sync_function_uses_async_fixture(testdir, mode):
+ testdir.makepyfile(
+ dedent(
+ """\
+ import pytest_asyncio
+
+ pytest_plugins = 'pytest_asyncio'
+
+ @pytest_asyncio.fixture
+ async def always_true():
+ return True
+
+ def test_sync_function_uses_async_fixture(always_true):
+ assert always_true is True
+ """
+ )
+ )
+ result = testdir.runpytest(f"--asyncio-mode={mode}")
+ result.assert_outcomes(passed=1)
diff --git a/testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/test_dependent_fixtures.py b/testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/test_dependent_fixtures.py
new file mode 100644
index 0000000000..dc70fe9cd3
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/test_dependent_fixtures.py
@@ -0,0 +1,14 @@
+import asyncio
+
+import pytest
+
+
+@pytest.mark.asyncio
+async def test_dependent_fixture(dependent_fixture):
+ """Test a dependent fixture."""
+ await asyncio.sleep(0.1)
+
+
+@pytest.mark.asyncio
+async def test_factory_involving_factories(factory_involving_factories):
+ factory_involving_factories()
diff --git a/testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/test_event_loop_scope.py b/testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/test_event_loop_scope.py
new file mode 100644
index 0000000000..21fd641515
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/test_event_loop_scope.py
@@ -0,0 +1,37 @@
+"""Test the event loop fixture provides a separate loop for each test.
+
+These tests need to be run together.
+"""
+import asyncio
+
+import pytest
+
+loop: asyncio.AbstractEventLoop
+
+
+def test_1():
+ global loop
+ # The main thread should have a default event loop.
+ loop = asyncio.get_event_loop_policy().get_event_loop()
+
+
+@pytest.mark.asyncio
+async def test_2():
+ global loop
+ running_loop = asyncio.get_event_loop_policy().get_event_loop()
+ # Make sure this test case received a different loop
+ assert running_loop is not loop
+ loop = running_loop # Store the loop reference for later
+
+
+def test_3():
+ global loop
+ current_loop = asyncio.get_event_loop_policy().get_event_loop()
+ # Now the event loop from test_2 should have been cleaned up
+ assert loop is not current_loop
+
+
+def test_4(event_loop):
+ # If a test sets the loop to None -- pytest_fixture_post_finalizer()
+ # still should work
+ asyncio.get_event_loop_policy().set_event_loop(None)
diff --git a/testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/test_flaky_integration.py b/testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/test_flaky_integration.py
new file mode 100644
index 0000000000..54c9d2eaeb
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/test_flaky_integration.py
@@ -0,0 +1,43 @@
+"""Tests for the Flaky integration, which retries failed tests.
+"""
+from textwrap import dedent
+
+
+def test_auto_mode_cmdline(testdir):
+ testdir.makepyfile(
+ dedent(
+ """\
+ import asyncio
+ import flaky
+ import pytest
+
+ _threshold = -1
+
+ @flaky.flaky(3, 2)
+ @pytest.mark.asyncio
+ async def test_asyncio_flaky_thing_that_fails_then_succeeds():
+ global _threshold
+ await asyncio.sleep(0.1)
+ _threshold += 1
+ assert _threshold != 1
+ """
+ )
+ )
+    # runpytest_subprocess() is required so that flaky's restart
+    # information does not pollute the captured output
+ result = testdir.runpytest_subprocess("--asyncio-mode=strict")
+ result.assert_outcomes(passed=1)
+ result.stdout.fnmatch_lines(
+ [
+ "===Flaky Test Report===",
+ "test_asyncio_flaky_thing_that_fails_then_succeeds passed 1 "
+ "out of the required 2 times. Running test again until it passes 2 times.",
+ "test_asyncio_flaky_thing_that_fails_then_succeeds failed "
+ "(1 runs remaining out of 3).",
+ " <class 'AssertionError'>",
+ " assert 1 != 1",
+ "test_asyncio_flaky_thing_that_fails_then_succeeds passed 2 "
+ "out of the required 2 times. Success!",
+ "===End Flaky Test Report===",
+ ]
+ )
diff --git a/testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/test_simple.py b/testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/test_simple.py
new file mode 100644
index 0000000000..dc68d61ec2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/test_simple.py
@@ -0,0 +1,275 @@
+"""Quick'n'dirty unit tests for provided fixtures and markers."""
+import asyncio
+from textwrap import dedent
+
+import pytest
+
+import pytest_asyncio.plugin
+
+
+async def async_coro():
+ await asyncio.sleep(0)
+ return "ok"
+
+
+def test_event_loop_fixture(event_loop):
+ """Test the injection of the event_loop fixture."""
+ assert event_loop
+ ret = event_loop.run_until_complete(async_coro())
+ assert ret == "ok"
+
+
+@pytest.mark.asyncio
+async def test_asyncio_marker():
+ """Test the asyncio pytest marker."""
+ await asyncio.sleep(0)
+
+
+@pytest.mark.xfail(reason="need a failure", strict=True)
+@pytest.mark.asyncio
+async def test_asyncio_marker_fail():
+ raise AssertionError
+
+
+@pytest.mark.asyncio
+async def test_asyncio_marker_with_default_param(a_param=None):
+ """Test the asyncio pytest marker."""
+ await asyncio.sleep(0)
+
+
+@pytest.mark.asyncio
+async def test_unused_port_fixture(unused_tcp_port, event_loop):
+ """Test the unused TCP port fixture."""
+
+ async def closer(_, writer):
+ writer.close()
+
+ server1 = await asyncio.start_server(closer, host="localhost", port=unused_tcp_port)
+
+ with pytest.raises(IOError):
+ await asyncio.start_server(closer, host="localhost", port=unused_tcp_port)
+
+ server1.close()
+ await server1.wait_closed()
+
+
+@pytest.mark.asyncio
+async def test_unused_udp_port_fixture(unused_udp_port, event_loop):
+    """Test the unused UDP port fixture."""
+
+ class Closer:
+ def connection_made(self, transport):
+ pass
+
+ def connection_lost(self, *arg, **kwd):
+ pass
+
+ transport1, _ = await event_loop.create_datagram_endpoint(
+ Closer,
+ local_addr=("127.0.0.1", unused_udp_port),
+ reuse_port=False,
+ )
+
+ with pytest.raises(IOError):
+ await event_loop.create_datagram_endpoint(
+ Closer,
+ local_addr=("127.0.0.1", unused_udp_port),
+ reuse_port=False,
+ )
+
+ transport1.abort()
+
+
+@pytest.mark.asyncio
+async def test_unused_port_factory_fixture(unused_tcp_port_factory, event_loop):
+ """Test the unused TCP port factory fixture."""
+
+ async def closer(_, writer):
+ writer.close()
+
+ port1, port2, port3 = (
+ unused_tcp_port_factory(),
+ unused_tcp_port_factory(),
+ unused_tcp_port_factory(),
+ )
+
+ server1 = await asyncio.start_server(closer, host="localhost", port=port1)
+ server2 = await asyncio.start_server(closer, host="localhost", port=port2)
+ server3 = await asyncio.start_server(closer, host="localhost", port=port3)
+
+ for port in port1, port2, port3:
+ with pytest.raises(IOError):
+ await asyncio.start_server(closer, host="localhost", port=port)
+
+ server1.close()
+ await server1.wait_closed()
+ server2.close()
+ await server2.wait_closed()
+ server3.close()
+ await server3.wait_closed()
+
+
+@pytest.mark.asyncio
+async def test_unused_udp_port_factory_fixture(unused_udp_port_factory, event_loop):
+ """Test the unused UDP port factory fixture."""
+
+ class Closer:
+ def connection_made(self, transport):
+ pass
+
+ def connection_lost(self, *arg, **kwd):
+ pass
+
+ port1, port2, port3 = (
+ unused_udp_port_factory(),
+ unused_udp_port_factory(),
+ unused_udp_port_factory(),
+ )
+
+ transport1, _ = await event_loop.create_datagram_endpoint(
+ Closer,
+ local_addr=("127.0.0.1", port1),
+ reuse_port=False,
+ )
+ transport2, _ = await event_loop.create_datagram_endpoint(
+ Closer,
+ local_addr=("127.0.0.1", port2),
+ reuse_port=False,
+ )
+ transport3, _ = await event_loop.create_datagram_endpoint(
+ Closer,
+ local_addr=("127.0.0.1", port3),
+ reuse_port=False,
+ )
+
+ for port in port1, port2, port3:
+ with pytest.raises(IOError):
+ await event_loop.create_datagram_endpoint(
+ Closer,
+ local_addr=("127.0.0.1", port),
+ reuse_port=False,
+ )
+
+ transport1.abort()
+ transport2.abort()
+ transport3.abort()
+
+
+def test_unused_port_factory_duplicate(unused_tcp_port_factory, monkeypatch):
+ """Test correct avoidance of duplicate ports."""
+ counter = 0
+
+ def mock_unused_tcp_port(_ignored):
+ """Force some duplicate ports."""
+ nonlocal counter
+ counter += 1
+ if counter < 5:
+ return 10000
+ else:
+ return 10000 + counter
+
+ monkeypatch.setattr(pytest_asyncio.plugin, "_unused_port", mock_unused_tcp_port)
+
+ assert unused_tcp_port_factory() == 10000
+ assert unused_tcp_port_factory() > 10000
+
+
+def test_unused_udp_port_factory_duplicate(unused_udp_port_factory, monkeypatch):
+ """Test correct avoidance of duplicate UDP ports."""
+ counter = 0
+
+ def mock_unused_udp_port(_ignored):
+ """Force some duplicate ports."""
+ nonlocal counter
+ counter += 1
+ if counter < 5:
+ return 10000
+ else:
+ return 10000 + counter
+
+ monkeypatch.setattr(pytest_asyncio.plugin, "_unused_port", mock_unused_udp_port)
+
+ assert unused_udp_port_factory() == 10000
+ assert unused_udp_port_factory() > 10000
+
+
+class TestMarkerInClassBasedTests:
+ """Test that asyncio marked functions work for methods of test classes."""
+
+ @pytest.mark.asyncio
+ async def test_asyncio_marker_with_explicit_loop_fixture(self, event_loop):
+ """Test the "asyncio" marker works on a method in
+ a class-based test with explicit loop fixture."""
+ ret = await async_coro()
+ assert ret == "ok"
+
+ @pytest.mark.asyncio
+ async def test_asyncio_marker_with_implicit_loop_fixture(self):
+ """Test the "asyncio" marker works on a method in
+ a class-based test with implicit loop fixture."""
+ ret = await async_coro()
+ assert ret == "ok"
+
+
+class TestEventLoopStartedBeforeFixtures:
+ @pytest.fixture
+ async def loop(self):
+ return asyncio.get_event_loop()
+
+ @staticmethod
+ def foo():
+ return 1
+
+ @pytest.mark.asyncio
+ async def test_no_event_loop(self, loop):
+ assert await loop.run_in_executor(None, self.foo) == 1
+
+ @pytest.mark.asyncio
+ async def test_event_loop_after_fixture(self, loop, event_loop):
+ assert await loop.run_in_executor(None, self.foo) == 1
+
+ @pytest.mark.asyncio
+ async def test_event_loop_before_fixture(self, event_loop, loop):
+ assert await loop.run_in_executor(None, self.foo) == 1
+
+
+@pytest.mark.asyncio
+async def test_no_warning_on_skip():
+ pytest.skip("Test a skip error inside asyncio")
+
+
+def test_async_close_loop(event_loop):
+ event_loop.close()
+ return "ok"
+
+
+def test_warn_asyncio_marker_for_regular_func(testdir):
+ testdir.makepyfile(
+ dedent(
+ """\
+ import pytest
+
+ pytest_plugins = 'pytest_asyncio'
+
+ @pytest.mark.asyncio
+ def test_a():
+ pass
+ """
+ )
+ )
+ testdir.makefile(
+ ".ini",
+ pytest=dedent(
+ """\
+ [pytest]
+ asyncio_mode = strict
+ filterwarnings =
+ default
+ """
+ ),
+ )
+ result = testdir.runpytest()
+ result.assert_outcomes(passed=1)
+ result.stdout.fnmatch_lines(
+ ["*is marked with '@pytest.mark.asyncio' but it is not an async function.*"]
+ )
diff --git a/testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/test_subprocess.py b/testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/test_subprocess.py
new file mode 100644
index 0000000000..79c5109dab
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/test_subprocess.py
@@ -0,0 +1,36 @@
+"""Tests for using subprocesses in tests."""
+import asyncio.subprocess
+import sys
+
+import pytest
+
+if sys.platform == "win32":
+ # The default asyncio event loop implementation on Windows does not
+ # support subprocesses. Subprocesses are available for Windows if a
+ # ProactorEventLoop is used.
+ @pytest.yield_fixture()
+ def event_loop():
+ loop = asyncio.ProactorEventLoop()
+ yield loop
+ loop.close()
+
+
+@pytest.mark.skipif(
+ sys.version_info < (3, 8),
+ reason="""
+ When run with Python 3.7 asyncio.subprocess.create_subprocess_exec seems to be
+ affected by an issue that prevents correct cleanup. Tests using pytest-trio
+ will report that signal handling is already performed by another library and
+ fail. [1] This is possibly a bug in CPython 3.7, so we ignore this test for
+ that Python version.
+
+ [1] https://github.com/python-trio/pytest-trio/issues/126
+ """,
+)
+@pytest.mark.asyncio
+async def test_subprocess(event_loop):
+ """Starting a subprocess should be possible."""
+ proc = await asyncio.subprocess.create_subprocess_exec(
+ sys.executable, "--version", stdout=asyncio.subprocess.PIPE
+ )
+ await proc.communicate()
diff --git a/testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/trio/test_fixtures.py b/testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/trio/test_fixtures.py
new file mode 100644
index 0000000000..42b28437fd
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest-asyncio/tests/trio/test_fixtures.py
@@ -0,0 +1,25 @@
+from textwrap import dedent
+
+
+def test_strict_mode_ignores_trio_fixtures(testdir):
+ testdir.makepyfile(
+ dedent(
+ """\
+ import pytest
+ import pytest_asyncio
+ import pytest_trio
+
+ pytest_plugins = ["pytest_asyncio", "pytest_trio"]
+
+ @pytest_trio.trio_fixture
+ async def any_fixture():
+ return True
+
+ @pytest.mark.trio
+ async def test_anything(any_fixture):
+ pass
+ """
+ )
+ )
+ result = testdir.runpytest("--asyncio-mode=strict")
+ result.assert_outcomes(passed=1)
diff --git a/testing/web-platform/tests/tools/third_party/pytest-asyncio/tools/get-version.py b/testing/web-platform/tests/tools/third_party/pytest-asyncio/tools/get-version.py
new file mode 100644
index 0000000000..e988a32cb9
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest-asyncio/tools/get-version.py
@@ -0,0 +1,17 @@
+import json
+import sys
+from importlib import metadata
+
+from packaging.version import parse as parse_version
+
+
+def main():
+ version_string = metadata.version("pytest-asyncio")
+ version = parse_version(version_string)
+ print(f"::set-output name=version::{version}")
+ prerelease = json.dumps(version.is_prerelease)
+ print(f"::set-output name=prerelease::{prerelease}")
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/testing/web-platform/tests/tools/third_party/pytest-asyncio/tox.ini b/testing/web-platform/tests/tools/third_party/pytest-asyncio/tox.ini
new file mode 100644
index 0000000000..1d8994ae4c
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest-asyncio/tox.ini
@@ -0,0 +1,56 @@
+[tox]
+minversion = 3.14.0
+envlist = py37, py38, py39, py310, py311, lint, version-info, pytest-min
+isolated_build = true
+passenv =
+ CI
+
+[testenv]
+extras = testing
+deps =
+ --requirement dependencies/default/requirements.txt
+ --constraint dependencies/default/constraints.txt
+commands = make test
+allowlist_externals =
+ make
+
+[testenv:pytest-min]
+extras = testing
+deps =
+ --requirement dependencies/pytest-min/requirements.txt
+ --constraint dependencies/pytest-min/constraints.txt
+commands = make test
+allowlist_externals =
+ make
+
+[testenv:lint]
+basepython = python3.10
+extras = testing
+deps =
+ pre-commit == 2.16.0
+commands =
+ make lint
+allowlist_externals =
+ make
+
+[testenv:coverage-report]
+deps = coverage
+skip_install = true
+commands =
+ coverage combine
+ coverage report
+
+[testenv:version-info]
+deps =
+ packaging == 21.3
+commands =
+ python ./tools/get-version.py
+
+[gh-actions]
+python =
+ 3.7: py37, pytest-min
+ 3.8: py38
+ 3.9: py39
+ 3.10: py310
+ 3.11-dev: py311
+ pypy3: pypy3