author     Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-07 19:33:14 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-07 19:33:14 +0000
commit     36d22d82aa202bb199967e9512281e9a53db42c9 (patch)
tree       105e8c98ddea1c1e4784a60a5a6410fa416be2de /testing/web-platform/tests/tools/third_party/attrs
parent     Initial commit. (diff)
download   firefox-esr-36d22d82aa202bb199967e9512281e9a53db42c9.tar.xz
           firefox-esr-36d22d82aa202bb199967e9512281e9a53db42c9.zip

Adding upstream version 115.7.0esr. (upstream/115.7.0esr)

Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'testing/web-platform/tests/tools/third_party/attrs')
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/.github/CODE_OF_CONDUCT.md | 133
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/.github/CONTRIBUTING.md | 230
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/.github/FUNDING.yml | 5
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/.github/PULL_REQUEST_TEMPLATE.md | 34
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/.github/SECURITY.md | 2
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/.github/workflows/main.yml | 113
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/.gitignore | 13
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/.pre-commit-config.yaml | 43
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/.readthedocs.yml | 16
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/AUTHORS.rst | 11
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/CHANGELOG.rst | 1027
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/LICENSE | 21
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/MANIFEST.in | 24
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/README.rst | 135
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/changelog.d/.gitignore | 0
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/changelog.d/towncrier_template.rst | 35
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/conftest.py | 29
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/docs/Makefile | 177
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/docs/_static/attrs_logo.png | bin 0 -> 7639 bytes
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/docs/_static/attrs_logo.svg | 10
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/docs/_static/attrs_logo_white.svg | 10
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/docs/api.rst | 826
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/docs/changelog.rst | 1
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/docs/comparison.rst | 66
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/docs/conf.py | 155
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/docs/docutils.conf | 3
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/docs/examples.rst | 709
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/docs/extending.rst | 313
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/docs/glossary.rst | 104
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/docs/hashing.rst | 86
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/docs/how-does-it-work.rst | 109
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/docs/index.rst | 100
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/docs/init.rst | 489
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/docs/license.rst | 8
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/docs/names.rst | 122
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/docs/overview.rst | 58
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/docs/python-2.rst | 25
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/docs/types.rst | 108
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/docs/why.rst | 290
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/mypy.ini | 3
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/pyproject.toml | 71
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/setup.py | 151
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/src/attr/__init__.py | 80
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/src/attr/__init__.pyi | 484
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/src/attr/_cmp.py | 154
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/src/attr/_cmp.pyi | 13
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/src/attr/_compat.py | 261
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/src/attr/_config.py | 33
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/src/attr/_funcs.py | 422
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/src/attr/_make.py | 3173
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/src/attr/_next_gen.py | 216
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/src/attr/_version_info.py | 87
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/src/attr/_version_info.pyi | 9
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/src/attr/converters.py | 155
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/src/attr/converters.pyi | 13
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/src/attr/exceptions.py | 94
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/src/attr/exceptions.pyi | 17
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/src/attr/filters.py | 54
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/src/attr/filters.pyi | 6
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/src/attr/py.typed | 0
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/src/attr/setters.py | 79
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/src/attr/setters.pyi | 19
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/src/attr/validators.py | 561
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/src/attr/validators.pyi | 78
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/src/attrs/__init__.py | 70
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/src/attrs/__init__.pyi | 63
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/src/attrs/converters.py | 3
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/src/attrs/exceptions.py | 3
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/src/attrs/filters.py | 3
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/src/attrs/py.typed | 0
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/src/attrs/setters.py | 3
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/src/attrs/validators.py | 3
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/tests/__init__.py | 1
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/tests/attr_import_star.py | 10
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/tests/dataclass_transform_example.py | 45
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/tests/strategies.py | 198
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/tests/test_3rd_party.py | 31
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/tests/test_annotations.py | 671
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/tests/test_cmp.py | 510
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/tests/test_compat.py | 52
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/tests/test_config.py | 45
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/tests/test_converters.py | 163
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/tests/test_dunders.py | 1008
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/tests/test_filters.py | 111
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/tests/test_funcs.py | 680
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/tests/test_functional.py | 790
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/tests/test_hooks.py | 209
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/tests/test_import.py | 11
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/tests/test_init_subclass.py | 48
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/tests/test_make.py | 2462
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/tests/test_mypy.yml | 1395
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/tests/test_next_gen.py | 440
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/tests/test_pattern_matching.py | 101
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/tests/test_pyright.py | 71
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/tests/test_setattr.py | 437
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/tests/test_slots.py | 740
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/tests/test_validators.py | 952
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/tests/test_version_info.py | 62
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/tests/typing_example.py | 420
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/tests/utils.py | 86
-rw-r--r--  testing/web-platform/tests/tools/third_party/attrs/tox.ini | 129
101 files changed, 23899 insertions, 0 deletions
diff --git a/testing/web-platform/tests/tools/third_party/attrs/.github/CODE_OF_CONDUCT.md b/testing/web-platform/tests/tools/third_party/attrs/.github/CODE_OF_CONDUCT.md
new file mode 100644
index 0000000000..1d8ad1833e
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/.github/CODE_OF_CONDUCT.md
@@ -0,0 +1,133 @@
+
+# Contributor Covenant Code of Conduct
+
+## Our Pledge
+
+We as members, contributors, and leaders pledge to make participation in our
+community a harassment-free experience for everyone, regardless of age, body
+size, visible or invisible disability, ethnicity, sex characteristics, gender
+identity and expression, level of experience, education, socio-economic status,
+nationality, personal appearance, race, caste, color, religion, or sexual
+identity and orientation.
+
+We pledge to act and interact in ways that contribute to an open, welcoming,
+diverse, inclusive, and healthy community.
+
+## Our Standards
+
+Examples of behavior that contributes to a positive environment for our
+community include:
+
+* Demonstrating empathy and kindness toward other people
+* Being respectful of differing opinions, viewpoints, and experiences
+* Giving and gracefully accepting constructive feedback
+* Accepting responsibility and apologizing to those affected by our mistakes,
+ and learning from the experience
+* Focusing on what is best not just for us as individuals, but for the overall
+ community
+
+Examples of unacceptable behavior include:
+
+* The use of sexualized language or imagery, and sexual attention or advances of
+ any kind
+* Trolling, insulting or derogatory comments, and personal or political attacks
+* Public or private harassment
+* Publishing others' private information, such as a physical or email address,
+ without their explicit permission
+* Other conduct which could reasonably be considered inappropriate in a
+ professional setting
+
+## Enforcement Responsibilities
+
+Community leaders are responsible for clarifying and enforcing our standards of
+acceptable behavior and will take appropriate and fair corrective action in
+response to any behavior that they deem inappropriate, threatening, offensive,
+or harmful.
+
+Community leaders have the right and responsibility to remove, edit, or reject
+comments, commits, code, wiki edits, issues, and other contributions that are
+not aligned to this Code of Conduct, and will communicate reasons for moderation
+decisions when appropriate.
+
+## Scope
+
+This Code of Conduct applies within all community spaces, and also applies when
+an individual is officially representing the community in public spaces.
+Examples of representing our community include using an official e-mail address,
+posting via an official social media account, or acting as an appointed
+representative at an online or offline event.
+
+## Enforcement
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be
+reported to the community leaders responsible for enforcement at
+<mailto:hs@ox.cx>.
+All complaints will be reviewed and investigated promptly and fairly.
+
+All community leaders are obligated to respect the privacy and security of the
+reporter of any incident.
+
+## Enforcement Guidelines
+
+Community leaders will follow these Community Impact Guidelines in determining
+the consequences for any action they deem in violation of this Code of Conduct:
+
+### 1. Correction
+
+**Community Impact**: Use of inappropriate language or other behavior deemed
+unprofessional or unwelcome in the community.
+
+**Consequence**: A private, written warning from community leaders, providing
+clarity around the nature of the violation and an explanation of why the
+behavior was inappropriate. A public apology may be requested.
+
+### 2. Warning
+
+**Community Impact**: A violation through a single incident or series of
+actions.
+
+**Consequence**: A warning with consequences for continued behavior. No
+interaction with the people involved, including unsolicited interaction with
+those enforcing the Code of Conduct, for a specified period of time. This
+includes avoiding interactions in community spaces as well as external channels
+like social media. Violating these terms may lead to a temporary or permanent
+ban.
+
+### 3. Temporary Ban
+
+**Community Impact**: A serious violation of community standards, including
+sustained inappropriate behavior.
+
+**Consequence**: A temporary ban from any sort of interaction or public
+communication with the community for a specified period of time. No public or
+private interaction with the people involved, including unsolicited interaction
+with those enforcing the Code of Conduct, is allowed during this period.
+Violating these terms may lead to a permanent ban.
+
+### 4. Permanent Ban
+
+**Community Impact**: Demonstrating a pattern of violation of community
+standards, including sustained inappropriate behavior, harassment of an
+individual, or aggression toward or disparagement of classes of individuals.
+
+**Consequence**: A permanent ban from any sort of public interaction within the
+community.
+
+## Attribution
+
+This Code of Conduct is adapted from the [Contributor Covenant][homepage],
+version 2.1, available at
+[https://www.contributor-covenant.org/version/2/1/code_of_conduct.html][v2.1].
+
+Community Impact Guidelines were inspired by
+[Mozilla's code of conduct enforcement ladder][Mozilla CoC].
+
+For answers to common questions about this code of conduct, see the FAQ at
+[https://www.contributor-covenant.org/faq][FAQ]. Translations are available at
+[https://www.contributor-covenant.org/translations][translations].
+
+[homepage]: https://www.contributor-covenant.org
+[v2.1]: https://www.contributor-covenant.org/version/2/1/code_of_conduct.html
+[Mozilla CoC]: https://github.com/mozilla/diversity
+[FAQ]: https://www.contributor-covenant.org/faq
+[translations]: https://www.contributor-covenant.org/translations
diff --git a/testing/web-platform/tests/tools/third_party/attrs/.github/CONTRIBUTING.md b/testing/web-platform/tests/tools/third_party/attrs/.github/CONTRIBUTING.md
new file mode 100644
index 0000000000..bbdc20f193
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/.github/CONTRIBUTING.md
@@ -0,0 +1,230 @@
+# How To Contribute
+
+First off, thank you for considering contributing to `attrs`!
+It's people like *you* who make it such a great tool for everyone.
+
+This document intends to make contribution more accessible by codifying tribal knowledge and expectations.
+Don't be afraid to open half-finished PRs, and ask questions if something is unclear!
+
+Please note that this project is released with a Contributor [Code of Conduct](https://github.com/python-attrs/attrs/blob/main/.github/CODE_OF_CONDUCT.md).
+By participating in this project you agree to abide by its terms.
+Please report any harm to [Hynek Schlawack] in any way you find appropriate.
+
+
+## Support
+
+In case you'd like to help out but don't want to deal with GitHub, there's a great opportunity:
+help your fellow developers on [Stack Overflow](https://stackoverflow.com/questions/tagged/python-attrs)!
+
+The official tag is `python-attrs` and helping out in support frees us up to improve `attrs` instead!
+
+
+## Workflow
+
+- No contribution is too small!
+ Please submit as many fixes for typos and grammar bloopers as you can!
+- Try to limit each pull request to *one* change only.
+- Since we squash on merge, it's up to you how you handle updates to the main branch.
+ Whether you prefer to rebase on main or merge main into your branch, do whatever is more comfortable for you.
+- *Always* add tests and docs for your code.
+ This is a hard rule; patches with missing tests or documentation can't be merged.
+- Make sure your changes pass our [CI].
+ You won't get any feedback until it's green unless you ask for it.
+- For the CI to pass, the coverage must be 100%.
+  If you have problems testing something, open the pull request anyway and ask for advice.
+ In some situations, we may agree to add an `# pragma: no cover`.
+- Once you've addressed review feedback, make sure to bump the pull request with a short note, so we know you're done.
+- Don’t break backwards compatibility.
+
+
+## Code
+
+- Obey [PEP 8](https://www.python.org/dev/peps/pep-0008/) and [PEP 257](https://www.python.org/dev/peps/pep-0257/).
+ We use the `"""`-on-separate-lines style for docstrings:
+
+ ```python
+ def func(x):
+ """
+ Do something.
+
+ :param str x: A very important parameter.
+
+ :rtype: str
+ """
+ ```
+- If you add or change public APIs, tag the docstring using `.. versionadded:: 16.0.0 WHAT` or `.. versionchanged:: 16.2.0 WHAT` (see the sketch after this list).
+- We use [*isort*](https://github.com/PyCQA/isort) to sort our imports, and we use [*Black*](https://github.com/psf/black) with line length of 79 characters to format our code.
+ As long as you run our full [*tox*] suite before committing, or install our [*pre-commit*] hooks (ideally you'll do both – see [*Local Development Environment*](#local-development-environment) below), you won't have to spend any time on formatting your code at all.
+ If you don't, [CI] will catch it for you – but that seems like a waste of your time!
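+
+As a hedged sketch of the two rules above, a newly added public helper might be documented like this (the function name, parameter, and version number are purely illustrative):
+
+```python
+def to_serializable(inst):
+    """
+    Convert *inst* into a serializable form.
+
+    :param inst: An ``attrs`` instance.
+
+    :rtype: dict
+
+    .. versionadded:: 22.1.0
+    """
+```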
+
+
+## Tests
+
+- Write your asserts as `expected == actual` to line them up nicely:
+
+ ```python
+ x = f()
+
+ assert 42 == x.some_attribute
+ assert "foo" == x._a_private_attribute
+ ```
+
+- To run the test suite, all you need is a recent [*tox*].
+ It will ensure the test suite runs with all dependencies against all Python versions just as it will in our [CI].
+  If you lack some Python versions, you can always limit the environments like `tox -e py27,py38`, or make it a non-failure using `tox --skip-missing-interpreters`.
+
+ In that case you should look into [*asdf*](https://asdf-vm.com) or [*pyenv*](https://github.com/pyenv/pyenv), which make it very easy to install many different Python versions in parallel.
+- Write [good test docstrings](https://jml.io/pages/test-docstrings.html).
+- To ensure new features work well with the rest of the system, they should be also added to our [*Hypothesis*](https://hypothesis.readthedocs.io/) testing strategy, which can be found in `tests/strategies.py`.
+- If you've changed or added public APIs, please update our type stubs (files ending in `.pyi`).
+
+
+## Documentation
+
+- Use [semantic newlines] in [*reStructuredText*] files (files ending in `.rst`):
+
+ ```rst
+ This is a sentence.
+ This is another sentence.
+ ```
+
+- If you start a new section, add two blank lines before and one blank line after the header, except if two headers follow immediately after each other:
+
+ ```rst
+ Last line of previous section.
+
+
+ Header of New Top Section
+ -------------------------
+
+ Header of New Section
+ ^^^^^^^^^^^^^^^^^^^^^
+
+ First line of new section.
+ ```
+
+- If you add a new feature, demonstrate its awesomeness on the [examples page](https://github.com/python-attrs/attrs/blob/main/docs/examples.rst)!
+
+
+### Changelog
+
+If your change is noteworthy, there needs to be a changelog entry so our users can learn about it!
+
+To avoid merge conflicts, we use the [*towncrier*](https://pypi.org/project/towncrier) package to manage our changelog.
+*towncrier* uses independent files for each pull request – so called *news fragments* – instead of one monolithic changelog file.
+On release, those news fragments are compiled into our [`CHANGELOG.rst`](https://github.com/python-attrs/attrs/blob/main/CHANGELOG.rst).
+
+You don't need to install *towncrier* yourself; you just have to abide by a few simple rules:
+
+- For each pull request, add a new file into `changelog.d` with a filename adhering to the `pr#.(change|deprecation|breaking).rst` schema:
+ For example, `changelog.d/42.change.rst` for a non-breaking change that is proposed in pull request #42.
+- As with other docs, please use [semantic newlines] within news fragments.
+- Wrap symbols like modules, functions, or classes into double backticks so they are rendered in a `monospace font`.
+- Wrap arguments into asterisks like in docstrings:
+ `Added new argument *an_argument*.`
+- If you mention functions or other callables, add parentheses at the end of their names:
+ `attrs.func()` or `attrs.Class.method()`.
+ This makes the changelog a lot more readable.
+- Prefer simple past tense or constructions with "now".
+ For example:
+
+ + Added `attrs.validators.func()`.
+ + `attrs.func()` now doesn't crash the Large Hadron Collider anymore when passed the *foobar* argument.
+- If you want to reference multiple issues, copy the news fragment to another filename.
+ *towncrier* will merge all news fragments with identical contents into one entry with multiple links to the respective pull requests.
+
+Example entries:
+
+ ```rst
+ Added ``attrs.validators.func()``.
+ The feature really *is* awesome.
+ ```
+
+or:
+
+ ```rst
+ ``attrs.func()`` now doesn't crash the Large Hadron Collider anymore when passed the *foobar* argument.
+ The bug really *was* nasty.
+ ```
+
+---
+
+``tox -e changelog`` will render the current changelog to the terminal if you have any doubts.
+
+
+## Local Development Environment
+
+You can (and should) run our test suite using [*tox*].
+However, you’ll probably want a more traditional environment as well.
+We highly recommend developing with the latest Python release because we try to take advantage of modern features whenever possible.
+
+First create a [virtual environment](https://virtualenv.pypa.io/) so you don't break your system-wide Python installation.
+It’s out of scope for this document to list all the ways to manage virtual environments in Python, but if you don’t already have a pet way, take some time to look at tools like [*direnv*](https://hynek.me/til/python-project-local-venvs/), [*virtualfish*](https://virtualfish.readthedocs.io/), and [*virtualenvwrapper*](https://virtualenvwrapper.readthedocs.io/).
+
+Next, get an up to date checkout of the `attrs` repository:
+
+```console
+$ git clone git@github.com:python-attrs/attrs.git
+```
+
+or if you want to use git via `https`:
+
+```console
+$ git clone https://github.com/python-attrs/attrs.git
+```
+
+Change into the newly created directory and **after activating your virtual environment** install an editable version of `attrs` along with its tests and docs requirements:
+
+```console
+$ cd attrs
+$ pip install --upgrade pip setuptools # PLEASE don't skip this step
+$ pip install -e '.[dev]'
+```
+
+At this point,
+
+```console
+$ python -m pytest
+```
+
+should work and pass, as should:
+
+```console
+$ cd docs
+$ make html
+```
+
+The built documentation can then be found in `docs/_build/html/`.
+
+To avoid committing code that violates our style guide, we strongly advise you to install [*pre-commit*] [^dev] hooks:
+
+```console
+$ pre-commit install
+```
+
+You can also run them anytime (as our tox does) using:
+
+```console
+$ pre-commit run --all-files
+```
+
+[^dev]: *pre-commit* should have been installed into your virtualenv automatically when you ran `pip install -e '.[dev]'` above.
+    If *pre-commit* is missing, you probably need to run `pip install -e '.[dev]'` again.
+
+
+## Governance
+
+`attrs` is maintained by a [team of volunteers](https://github.com/python-attrs) that is always open to new members who share our vision of a fast, lean, and magic-free library that empowers programmers to write better code with less effort.
+If you'd like to join, just get a pull request merged and ask to be added in the very same pull request!
+
+**The simple rule is that everyone is welcome to review/merge pull requests of others but nobody is allowed to merge their own code.**
+
+[Hynek Schlawack] acts reluctantly as the [BDFL](https://en.wikipedia.org/wiki/Benevolent_dictator_for_life) and has the final say over design decisions.
+
+
+[CI]: https://github.com/python-attrs/attrs/actions?query=workflow%3ACI
+[Hynek Schlawack]: https://hynek.me/about/
+[*pre-commit*]: https://pre-commit.com/
+[*tox*]: https://tox.wiki/
+[semantic newlines]: https://rhodesmill.org/brandon/2012/one-sentence-per-line/
+[*reStructuredText*]: https://www.sphinx-doc.org/en/stable/usage/restructuredtext/basics.html
diff --git a/testing/web-platform/tests/tools/third_party/attrs/.github/FUNDING.yml b/testing/web-platform/tests/tools/third_party/attrs/.github/FUNDING.yml
new file mode 100644
index 0000000000..ef4f212162
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/.github/FUNDING.yml
@@ -0,0 +1,5 @@
+---
+
+github: hynek
+ko_fi: the_hynek
+tidelift: "pypi/attrs"
diff --git a/testing/web-platform/tests/tools/third_party/attrs/.github/PULL_REQUEST_TEMPLATE.md b/testing/web-platform/tests/tools/third_party/attrs/.github/PULL_REQUEST_TEMPLATE.md
new file mode 100644
index 0000000000..88f6415e96
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/.github/PULL_REQUEST_TEMPLATE.md
@@ -0,0 +1,34 @@
+# Summary
+
+<!-- Please tell us what your pull request is about here. -->
+
+
+# Pull Request Check List
+
+<!--
+This is just a friendly reminder about the most common mistakes.
+Please make sure that you tick all boxes.
+But please read our [contribution guide](https://github.com/python-attrs/attrs/blob/main/.github/CONTRIBUTING.md) at least once; it will save you unnecessary review cycles!
+
+If an item doesn't apply to your pull request, **check it anyway** to make it apparent that there's nothing left to do.
+If your pull request is a documentation fix or a trivial typo, feel free to delete the whole thing.
+-->
+
+- [ ] Added **tests** for changed code.
+ Our CI fails if coverage is not 100%.
+- [ ] New features have been added to our [Hypothesis testing strategy](https://github.com/python-attrs/attrs/blob/main/tests/strategies.py).
+- [ ] Changes or additions to public APIs are reflected in our type stubs (files ending in ``.pyi``).
+ - [ ] ...and used in the stub test file `tests/typing_example.py`.
+ - [ ] If they've been added to `attr/__init__.pyi`, they've *also* been re-imported in `attrs/__init__.pyi`.
+- [ ] Updated **documentation** for changed code.
+ - [ ] New functions/classes have to be added to `docs/api.rst` by hand.
+ - [ ] Changes to the signature of `@attr.s()` have to be added by hand too.
+ - [ ] Changed/added classes/methods/functions have appropriate `versionadded`, `versionchanged`, or `deprecated` [directives](http://www.sphinx-doc.org/en/stable/markup/para.html#directive-versionadded).
+ Find the appropriate next version in our [``__init__.py``](https://github.com/python-attrs/attrs/blob/main/src/attr/__init__.py) file.
+- [ ] Documentation in `.rst` files is written using [semantic newlines](https://rhodesmill.org/brandon/2012/one-sentence-per-line/).
+- [ ] Changes (and possible deprecations) have news fragments in [`changelog.d`](https://github.com/python-attrs/attrs/blob/main/changelog.d).
+
+<!--
+If you have *any* questions to *any* of the points above, just **submit and ask**!
+This checklist is here to *help* you, not to deter you from contributing!
+-->
diff --git a/testing/web-platform/tests/tools/third_party/attrs/.github/SECURITY.md b/testing/web-platform/tests/tools/third_party/attrs/.github/SECURITY.md
new file mode 100644
index 0000000000..5e565ec19c
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/.github/SECURITY.md
@@ -0,0 +1,2 @@
+To report a security vulnerability, please use the [Tidelift security contact](https://tidelift.com/security).
+Tidelift will coordinate the fix and disclosure.
diff --git a/testing/web-platform/tests/tools/third_party/attrs/.github/workflows/main.yml b/testing/web-platform/tests/tools/third_party/attrs/.github/workflows/main.yml
new file mode 100644
index 0000000000..f38fd91509
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/.github/workflows/main.yml
@@ -0,0 +1,113 @@
+---
+name: CI
+
+on:
+ push:
+ branches: ["main"]
+ pull_request:
+ branches: ["main"]
+ workflow_dispatch:
+
+env:
+ FORCE_COLOR: "1" # Make tools pretty.
+ TOX_TESTENV_PASSENV: FORCE_COLOR
+ PYTHON_LATEST: "3.10"
+
+
+jobs:
+ tests:
+ name: tox on ${{ matrix.python-version }}
+ runs-on: ubuntu-latest
+
+ strategy:
+ fail-fast: false
+ matrix:
+ python-version: ["2.7", "3.5", "3.6", "3.7", "3.8", "3.9", "3.10", "pypy-2.7", "pypy-3.7", "pypy-3.8"]
+
+ steps:
+ - uses: actions/checkout@v2
+ - uses: actions/setup-python@v2
+ with:
+ python-version: ${{ matrix.python-version }}
+
+ - name: "Install dependencies"
+ run: |
+ python -VV
+ python -m site
+ python -m pip install --upgrade pip setuptools wheel
+ python -m pip install --upgrade virtualenv tox tox-gh-actions
+
+ - run: "python -m tox"
+
+ - name: Upload coverage data
+ uses: "actions/upload-artifact@v2"
+ with:
+ name: coverage-data
+ path: ".coverage.*"
+ if-no-files-found: ignore
+
+
+ coverage:
+ runs-on: ubuntu-latest
+ needs: tests
+
+ steps:
+ - uses: actions/checkout@v2
+ - uses: actions/setup-python@v2
+ with:
+ # Use latest Python, so it understands all syntax.
+ python-version: ${{env.PYTHON_LATEST}}
+
+ - run: python -m pip install --upgrade coverage[toml]
+
+ - name: Download coverage data
+ uses: actions/download-artifact@v2
+ with:
+ name: coverage-data
+
+ - name: Combine coverage and fail if it's <100%.
+ run: |
+ python -m coverage combine
+ python -m coverage html --skip-covered --skip-empty
+ python -m coverage report --fail-under=100
+
+ - name: Upload HTML report if check failed.
+ uses: actions/upload-artifact@v2
+ with:
+ name: html-report
+ path: htmlcov
+ if: ${{ failure() }}
+
+
+ package:
+ name: Build & verify package
+ runs-on: ubuntu-latest
+
+ steps:
+ - uses: actions/checkout@v2
+ - uses: actions/setup-python@v2
+ with:
+ python-version: ${{env.PYTHON_LATEST}}
+
+ - run: python -m pip install build twine check-wheel-contents
+ - run: python -m build --sdist --wheel .
+ - run: ls -l dist
+ - run: check-wheel-contents dist/*.whl
+ - name: Check long_description
+ run: python -m twine check dist/*
+
+
+ install-dev:
+ name: Verify dev env
+ runs-on: ${{ matrix.os }}
+ strategy:
+ matrix:
+ os: ["ubuntu-latest", "windows-latest"]
+
+ steps:
+ - uses: actions/checkout@v2
+ - uses: actions/setup-python@v2
+ with:
+ python-version: ${{env.PYTHON_LATEST}}
+ - run: python -m pip install -e .[dev]
+ - run: python -c 'import attr; print(attr.__version__)'
diff --git a/testing/web-platform/tests/tools/third_party/attrs/.gitignore b/testing/web-platform/tests/tools/third_party/attrs/.gitignore
new file mode 100644
index 0000000000..d054dc6267
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/.gitignore
@@ -0,0 +1,13 @@
+*.egg-info
+*.pyc
+.cache
+.coverage*
+.hypothesis
+.mypy_cache
+.pytest_cache
+.tox
+build
+dist
+docs/_build/
+htmlcov
+pip-wheel-metadata
diff --git a/testing/web-platform/tests/tools/third_party/attrs/.pre-commit-config.yaml b/testing/web-platform/tests/tools/third_party/attrs/.pre-commit-config.yaml
new file mode 100644
index 0000000000..a913b068f5
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/.pre-commit-config.yaml
@@ -0,0 +1,43 @@
+---
+ci:
+ autoupdate_schedule: monthly
+
+repos:
+ - repo: https://github.com/psf/black
+ rev: 21.12b0
+ hooks:
+ - id: black
+ exclude: tests/test_pattern_matching.py
+ language_version: python3.10
+
+ - repo: https://github.com/PyCQA/isort
+ rev: 5.10.1
+ hooks:
+ - id: isort
+ additional_dependencies: [toml]
+ files: \.py$
+ language_version: python3.10
+
+ - repo: https://github.com/PyCQA/flake8
+ rev: 4.0.1
+ hooks:
+ - id: flake8
+ language_version: python3.10
+
+ - repo: https://github.com/econchick/interrogate
+ rev: 1.5.0
+ hooks:
+ - id: interrogate
+ exclude: tests/test_pattern_matching.py
+ args: [tests]
+ language_version: python3.10
+
+ - repo: https://github.com/pre-commit/pre-commit-hooks
+ rev: v4.1.0
+ hooks:
+ - id: trailing-whitespace
+ - id: end-of-file-fixer
+ - id: debug-statements
+ language_version: python3.10
+ - id: check-toml
+ - id: check-yaml
diff --git a/testing/web-platform/tests/tools/third_party/attrs/.readthedocs.yml b/testing/web-platform/tests/tools/third_party/attrs/.readthedocs.yml
new file mode 100644
index 0000000000..d335c40d56
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/.readthedocs.yml
@@ -0,0 +1,16 @@
+---
+version: 2
+formats: all
+
+build:
+ os: ubuntu-20.04
+ tools:
+ # Keep version in sync with tox.ini (docs and gh-actions).
+ python: "3.10"
+
+python:
+ install:
+ - method: pip
+ path: .
+ extra_requirements:
+ - docs
diff --git a/testing/web-platform/tests/tools/third_party/attrs/AUTHORS.rst b/testing/web-platform/tests/tools/third_party/attrs/AUTHORS.rst
new file mode 100644
index 0000000000..f14ef6c607
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/AUTHORS.rst
@@ -0,0 +1,11 @@
+Credits
+=======
+
+``attrs`` is written and maintained by `Hynek Schlawack <https://hynek.me/>`_.
+
+The development is kindly supported by `Variomedia AG <https://www.variomedia.de/>`_.
+
+A full list of contributors can be found in `GitHub's overview <https://github.com/python-attrs/attrs/graphs/contributors>`_.
+
+It’s the spiritual successor of `characteristic <https://characteristic.readthedocs.io/>`_ and aspires to fix some of its clunkiness and unfortunate decisions.
+Both were inspired by Twisted’s `FancyEqMixin <https://twistedmatrix.com/documents/current/api/twisted.python.util.FancyEqMixin.html>`_ but both are implemented using class decorators because `subclassing is bad for you <https://www.youtube.com/watch?v=3MNVP9-hglc>`_, m’kay?
diff --git a/testing/web-platform/tests/tools/third_party/attrs/CHANGELOG.rst b/testing/web-platform/tests/tools/third_party/attrs/CHANGELOG.rst
new file mode 100644
index 0000000000..1d194add22
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/CHANGELOG.rst
@@ -0,0 +1,1027 @@
+Changelog
+=========
+
+Versions follow `CalVer <https://calver.org>`_ with a strict backwards-compatibility policy.
+
+The **first number** of the version is the year.
+The **second number** is incremented with each release, starting at 1 for each year.
+The **third number** is when we need to start branches for older releases (only for emergencies).
+
+Put simply, you shouldn't ever be afraid to upgrade ``attrs`` if you're only using its public APIs.
+Whenever there is a need to break compatibility, it is announced here in the changelog, and raises a ``DeprecationWarning`` for a year (if possible) before it's finally really broken.
+
+.. warning::
+
+ The structure of the `attrs.Attribute` class is exempt from this rule.
+ It *will* change in the future, but since it should be considered read-only, that shouldn't matter.
+
+ However if you intend to build extensions on top of ``attrs`` you have to anticipate that.
+
+.. towncrier release notes start
+
+21.4.0 (2021-12-29)
+-------------------
+
+Changes
+^^^^^^^
+
+- Fixed the test suite on PyPy3.8 where ``cloudpickle`` does not work.
+ `#892 <https://github.com/python-attrs/attrs/issues/892>`_
+- Fixed ``coverage report`` for projects that use ``attrs`` and don't set a ``--source``.
+ `#895 <https://github.com/python-attrs/attrs/issues/895>`_,
+ `#896 <https://github.com/python-attrs/attrs/issues/896>`_
+
+
+----
+
+
+21.3.0 (2021-12-28)
+-------------------
+
+Backward-incompatible Changes
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+- When using ``@define``, converters are now run by default when setting an attribute on an instance, in addition to validators.
+  I.e. the new default is ``on_setattr=[attrs.setters.convert, attrs.setters.validate]``.
+
+  This is unfortunately a breaking change, but it was an oversight, impossible to raise a ``DeprecationWarning`` about, and it's better to fix it now while the APIs are very fresh with few users (see the sketch after this list).
+ `#835 <https://github.com/python-attrs/attrs/issues/835>`_,
+ `#886 <https://github.com/python-attrs/attrs/issues/886>`_
+- ``import attrs`` has finally landed!
+ As of this release, you can finally import ``attrs`` using its proper name.
+
+ Not all names from the ``attr`` namespace have been transferred; most notably ``attr.s`` and ``attr.ib`` are missing.
+ See ``attrs.define`` and ``attrs.field`` if you haven't seen our next-generation APIs yet.
+  A more elaborate explanation can be found in `On The Core API Names <https://www.attrs.org/en/latest/names.html>`_.
+
+ This feature is at least for one release **provisional**.
+ We don't *plan* on changing anything, but such a big change is unlikely to go perfectly on the first strike.
+
+ The API docs have been mostly updated, but it will be an ongoing effort to change everything to the new APIs.
+ Please note that we have **not** moved -- or even removed -- anything from ``attr``!
+
+ Please do report any bugs or documentation inconsistencies!
+ `#887 <https://github.com/python-attrs/attrs/issues/887>`_
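+
+For illustration, a minimal sketch of the two changes above, i.e. the new ``attrs`` namespace and converters running on ``setattr``; the class ``C`` and its field are purely illustrative::
+
+    import attrs
+
+    @attrs.define
+    class C:
+        x: int = attrs.field(converter=int)
+
+    c = C("1")  # the converter runs in __init__, so c.x == 1
+    c.x = "2"   # converters now also run on setattr, so c.x == 2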
+
+
+Changes
+^^^^^^^
+
+- ``attr.asdict(retain_collection_types=False)`` (default) dumps collection-esque keys as tuples.
+ `#646 <https://github.com/python-attrs/attrs/issues/646>`_,
+ `#888 <https://github.com/python-attrs/attrs/issues/888>`_
+- ``__match_args__`` are now generated to support Python 3.10's
+ `Structural Pattern Matching <https://docs.python.org/3.10/whatsnew/3.10.html#pep-634-structural-pattern-matching>`_.
+ This can be controlled by the ``match_args`` argument to the class decorators on Python 3.10 and later.
+ On older versions, it is never added and the argument is ignored.
+ `#815 <https://github.com/python-attrs/attrs/issues/815>`_
+- If the class-level *on_setattr* is set to ``attrs.setters.validate`` (default in ``@define`` and ``@mutable``) but no field defines a validator, pretend that it's not set.
+ `#817 <https://github.com/python-attrs/attrs/issues/817>`_
+- The generated ``__repr__`` is significantly faster on Pythons with f-strings.
+ `#819 <https://github.com/python-attrs/attrs/issues/819>`_
+- Attributes transformed via ``field_transformer`` are wrapped with ``AttrsClass`` again.
+ `#824 <https://github.com/python-attrs/attrs/issues/824>`_
+- Generated source code is now cached more efficiently for identical classes.
+ `#828 <https://github.com/python-attrs/attrs/issues/828>`_
+- Added ``attrs.converters.to_bool()``.
+ `#830 <https://github.com/python-attrs/attrs/issues/830>`_
+- ``attrs.resolve_types()`` now resolves types of subclasses after the parents are resolved.
+ `#842 <https://github.com/python-attrs/attrs/issues/842>`_
+ `#843 <https://github.com/python-attrs/attrs/issues/843>`_
+- Added new validators: ``lt(val)`` (< val), ``le(val)`` (≤ val), ``ge(val)`` (≥ val), ``gt(val)`` (> val), and ``maxlen(n)`` (see the sketch after this list).
+ `#845 <https://github.com/python-attrs/attrs/issues/845>`_
+- ``attrs`` classes are now fully compatible with `cloudpickle <https://github.com/cloudpipe/cloudpickle>`_ (no need to disable ``repr`` anymore).
+ `#857 <https://github.com/python-attrs/attrs/issues/857>`_
+- Added new context manager ``attrs.validators.disabled()`` and functions ``attrs.validators.(set|get)_disabled()``.
+ They deprecate ``attrs.(set|get)_run_validators()``.
+ All functions are interoperable and modify the same internal state.
+ They are not – and never were – thread-safe, though.
+ `#859 <https://github.com/python-attrs/attrs/issues/859>`_
+- ``attrs.validators.matches_re()`` now accepts pre-compiled regular expressions in addition to pattern strings.
+ `#877 <https://github.com/python-attrs/attrs/issues/877>`_
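+
+For illustration, a minimal sketch of the new bound validators and of passing a pre-compiled pattern to ``matches_re()``; the class, fields, and pattern are purely illustrative::
+
+    import re
+
+    import attrs
+
+    ISO_DATE = re.compile(r"\d{4}-\d{2}-\d{2}")
+
+    @attrs.define
+    class Measurement:
+        percent: int = attrs.field(
+            validator=[attrs.validators.ge(0), attrs.validators.le(100)]
+        )
+        taken_on: str = attrs.field(validator=attrs.validators.matches_re(ISO_DATE))
+
+    Measurement(percent=42, taken_on="2021-12-29")  # passes all validators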
+
+
+----
+
+
+21.2.0 (2021-05-07)
+-------------------
+
+Backward-incompatible Changes
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+- We had to revert the recursive feature for ``attr.evolve()`` because it broke some use-cases -- sorry!
+ `#806 <https://github.com/python-attrs/attrs/issues/806>`_
+- Python 3.4 is now blocked using packaging metadata because ``attrs`` can't be imported on it anymore.
+ To ensure that 3.4 users can keep installing ``attrs`` easily, we will `yank <https://pypi.org/help/#yanked>`_ 21.1.0 from PyPI.
+ This has **no** consequences if you pin ``attrs`` to 21.1.0.
+ `#807 <https://github.com/python-attrs/attrs/issues/807>`_
+
+
+----
+
+
+21.1.0 (2021-05-06)
+-------------------
+
+Deprecations
+^^^^^^^^^^^^
+
+- The long-awaited, much-talked-about, little-delivered ``import attrs`` is finally upon us!
+
+ Since the NG APIs have now been proclaimed stable, the **next** release of ``attrs`` will allow you to actually ``import attrs``.
+ We're taking this opportunity to replace some defaults in our APIs that made sense in 2015, but don't in 2021.
+
+ So please, if you have any pet peeves about defaults in ``attrs``'s APIs, *now* is the time to air your grievances in #487!
+ We're not gonna get such a chance for a second time, without breaking our backward-compatibility guarantees, or long deprecation cycles.
+  Therefore, speak now or forever hold your peace!
+ `#487 <https://github.com/python-attrs/attrs/issues/487>`_
+- The *cmp* argument to ``attr.s()`` and ``attr.ib()`` has been **undeprecated**.
+ It will continue to be supported as syntactic sugar to set *eq* and *order* in one go.
+
+ I'm terribly sorry for the hassle around this argument!
+  The reason we're bringing it back is its usefulness regarding customization of equality/ordering.
+
+ The ``cmp`` attribute and argument on ``attr.Attribute`` remains deprecated and will be removed later this year.
+ `#773 <https://github.com/python-attrs/attrs/issues/773>`_
+
+
+Changes
+^^^^^^^
+
+- It's now possible to customize the behavior of ``eq`` and ``order`` by passing in a callable.
+ `#435 <https://github.com/python-attrs/attrs/issues/435>`_,
+ `#627 <https://github.com/python-attrs/attrs/issues/627>`_
+- The instant favorite next-generation APIs are not provisional anymore!
+
+ They are also officially supported by Mypy as of their `0.800 release <https://mypy-lang.blogspot.com/2021/01/mypy-0800-released.html>`_.
+
+ We hope the next release will already contain an (additional) importable package called ``attrs``.
+ `#668 <https://github.com/python-attrs/attrs/issues/668>`_,
+ `#786 <https://github.com/python-attrs/attrs/issues/786>`_
+- If an attribute defines a converter, the type of its parameter is used as type annotation for its corresponding ``__init__`` parameter.
+
+  If ``attr.converters.pipe()`` is used, the type of the first converter's parameter is used.
+ `#710 <https://github.com/python-attrs/attrs/issues/710>`_
+- Fixed the creation of an extra slot for an ``attr.ib`` when the parent class already has a slot with the same name.
+ `#718 <https://github.com/python-attrs/attrs/issues/718>`_
+- ``__attrs__init__()`` will now be injected if ``init=False``, or if ``auto_detect=True`` and a user-defined ``__init__()`` exists.
+
+ This enables users to do "pre-init" work in their ``__init__()`` (such as ``super().__init__()``).
+
+ ``__init__()`` can then delegate constructor argument processing to ``self.__attrs_init__(*args, **kwargs)``.
+ `#731 <https://github.com/python-attrs/attrs/issues/731>`_
+- ``bool(attr.NOTHING)`` is now ``False``.
+ `#732 <https://github.com/python-attrs/attrs/issues/732>`_
+- It's now possible to use ``super()`` inside of properties of slotted classes.
+ `#747 <https://github.com/python-attrs/attrs/issues/747>`_
+- Allow for a ``__attrs_pre_init__()`` method that -- if defined -- will get called at the beginning of the ``attrs``-generated ``__init__()`` method.
+ `#750 <https://github.com/python-attrs/attrs/issues/750>`_
+- Added forgotten ``attr.Attribute.evolve()`` to type stubs.
+ `#752 <https://github.com/python-attrs/attrs/issues/752>`_
+- ``attrs.evolve()`` now works recursively with nested ``attrs`` classes.
+ `#759 <https://github.com/python-attrs/attrs/issues/759>`_
+- Python 3.10 is now officially supported.
+ `#763 <https://github.com/python-attrs/attrs/issues/763>`_
+- ``attr.resolve_types()`` now takes an optional *attrib* argument to work inside a ``field_transformer``.
+ `#774 <https://github.com/python-attrs/attrs/issues/774>`_
+- ``ClassVar``\ s are now also detected if they come from `typing-extensions <https://pypi.org/project/typing-extensions/>`_.
+ `#782 <https://github.com/python-attrs/attrs/issues/782>`_
+- To make it easier to customize attribute comparison (#435), we have added the ``attr.cmp_using()`` helper (see the sketch after this list).
+
+ See the `new docs on comparison <https://www.attrs.org/en/stable/comparison.html>`_ for more details.
+ `#787 <https://github.com/python-attrs/attrs/issues/787>`_
+- Added **provisional** support for static typing in ``pyright`` via the `dataclass_transforms specification <https://github.com/microsoft/pyright/blob/main/specs/dataclass_transforms.md>`_.
+ Both the ``pyright`` specification and ``attrs`` implementation may change in future versions of both projects.
+
+ Your constructive feedback is welcome in both `attrs#795 <https://github.com/python-attrs/attrs/issues/795>`_ and `pyright#1782 <https://github.com/microsoft/pyright/discussions/1782>`_.
+ `#796 <https://github.com/python-attrs/attrs/issues/796>`_
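+
+For illustration, a minimal sketch of customizing field equality with the new comparison helper; the class, field, and tolerance are purely illustrative::
+
+    import attr
+
+    @attr.s
+    class FuzzyFloat:
+        x = attr.ib(eq=attr.cmp_using(eq=lambda a, b: abs(a - b) < 1e-9))
+
+    FuzzyFloat(1.0) == FuzzyFloat(1.0 + 1e-12)  # True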
+
+
+----
+
+
+20.3.0 (2020-11-05)
+-------------------
+
+Backward-incompatible Changes
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+- ``attr.define()``, ``attr.frozen()``, ``attr.mutable()``, and ``attr.field()`` remain **provisional**.
+
+ This release does **not** change anything about them and they are already used widely in production though.
+
+ If you wish to use them together with mypy, you can simply drop `this plugin <https://gist.github.com/hynek/1e3844d0c99e479e716169034b5fa963#file-attrs_ng_plugin-py>`_ into your project.
+
+ Feel free to provide feedback to them in the linked issue #668.
+
+ We will release the ``attrs`` namespace once we have the feeling that the APIs have properly settled.
+ `#668 <https://github.com/python-attrs/attrs/issues/668>`_
+
+
+Changes
+^^^^^^^
+
+- ``attr.s()`` now has a *field_transformer* hook that is called for all ``Attribute``\ s and returns a (modified or updated) list of ``Attribute`` instances.
+ ``attr.asdict()`` has a *value_serializer* hook that can change the way values are converted.
+  Both hooks are meant to help with data (de-)serialization workflows (see the sketch after this list).
+ `#653 <https://github.com/python-attrs/attrs/issues/653>`_
+- ``kw_only=True`` now works on Python 2.
+ `#700 <https://github.com/python-attrs/attrs/issues/700>`_
+- ``raise from`` now works on frozen classes on PyPy.
+ `#703 <https://github.com/python-attrs/attrs/issues/703>`_,
+ `#712 <https://github.com/python-attrs/attrs/issues/712>`_
+- ``attr.asdict()`` and ``attr.astuple()`` now treat ``frozenset``\ s like ``set``\ s with regards to the *retain_collection_types* argument.
+ `#704 <https://github.com/python-attrs/attrs/issues/704>`_
+- The type stubs for ``attr.s()`` and ``attr.make_class()`` are not missing the *collect_by_mro* argument anymore.
+ `#711 <https://github.com/python-attrs/attrs/issues/711>`_
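+
+For illustration, a minimal sketch of the *value_serializer* hook described above; the class, field, and serializer function are purely illustrative::
+
+    import datetime
+
+    import attr
+
+    @attr.s
+    class Event:
+        happened_at = attr.ib()
+
+    def serialize(inst, field, value):
+        if isinstance(value, datetime.datetime):
+            return value.isoformat()
+        return value
+
+    attr.asdict(Event(datetime.datetime(2020, 11, 5)), value_serializer=serialize)
+    # -> {'happened_at': '2020-11-05T00:00:00'}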
+
+
+----
+
+
+20.2.0 (2020-09-05)
+-------------------
+
+Backward-incompatible Changes
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+- ``attr.define()``, ``attr.frozen()``, ``attr.mutable()``, and ``attr.field()`` remain **provisional**.
+
+ This release fixes a bunch of bugs and ergonomics but they remain mostly unchanged.
+
+ If you wish to use them together with mypy, you can simply drop `this plugin <https://gist.github.com/hynek/1e3844d0c99e479e716169034b5fa963#file-attrs_ng_plugin-py>`_ into your project.
+
+ Feel free to provide feedback to them in the linked issue #668.
+
+ We will release the ``attrs`` namespace once we have the feeling that the APIs have properly settled.
+ `#668 <https://github.com/python-attrs/attrs/issues/668>`_
+
+
+Changes
+^^^^^^^
+
+- ``attr.define()`` et al now correctly detect ``__eq__`` and ``__ne__``.
+ `#671 <https://github.com/python-attrs/attrs/issues/671>`_
+- ``attr.define()`` et al's hybrid behavior now also works correctly when arguments are passed.
+ `#675 <https://github.com/python-attrs/attrs/issues/675>`_
+- It's possible to define custom ``__setattr__`` methods on slotted classes again.
+ `#681 <https://github.com/python-attrs/attrs/issues/681>`_
+- In 20.1.0 we introduced the ``inherited`` attribute on the ``attr.Attribute`` class to differentiate attributes that have been inherited and those that have been defined directly on the class.
+
+ It has shown to be problematic to involve that attribute when comparing instances of ``attr.Attribute`` though, because when sub-classing, attributes from base classes are suddenly not equal to themselves in a super class.
+
+ Therefore the ``inherited`` attribute will now be ignored when hashing and comparing instances of ``attr.Attribute``.
+ `#684 <https://github.com/python-attrs/attrs/issues/684>`_
+- ``zope.interface`` is now a "soft dependency" when running the test suite; if ``zope.interface`` is not installed when running the test suite, the interface-related tests will be automatically skipped.
+ `#685 <https://github.com/python-attrs/attrs/issues/685>`_
+- The ergonomics of creating frozen classes using ``@define(frozen=True)`` and sub-classing frozen classes has been improved:
+ you don't have to set ``on_setattr=None`` anymore.
+ `#687 <https://github.com/python-attrs/attrs/issues/687>`_
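+
+For illustration, a minimal sketch of the improved frozen-class ergonomics; the class names and fields are purely illustrative::
+
+    import attr
+
+    @attr.define(frozen=True)
+    class Base:
+        x: int
+
+    @attr.define(frozen=True)  # no on_setattr=None needed for the subclass anymore
+    class Child(Base):
+        y: int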
+
+
+----
+
+
+20.1.0 (2020-08-20)
+-------------------
+
+Backward-incompatible Changes
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+- Python 3.4 is not supported anymore.
+ It has been unsupported by the Python core team for a while now, its PyPI downloads are negligible, and our CI provider removed it as a supported option.
+
+ It's very unlikely that ``attrs`` will break under 3.4 anytime soon, which is why we do *not* block its installation on Python 3.4.
+ But we don't test it anymore and will block it once someone reports breakage.
+ `#608 <https://github.com/python-attrs/attrs/issues/608>`_
+
+
+Deprecations
+^^^^^^^^^^^^
+
+- Less of a deprecation and more of a heads up: the next release of ``attrs`` will introduce an ``attrs`` namespace.
+ That means that you'll finally be able to run ``import attrs`` with new functions that aren't cute abbreviations and that will carry better defaults.
+
+  This should not break any of your code, because project-local packages have priority over installed ones.
+ If this is a problem for you for some reason, please report it to our bug tracker and we'll figure something out.
+
+ The old ``attr`` namespace isn't going anywhere and its defaults are not changing – this is a purely additive measure.
+ Please check out the linked issue for more details.
+
+ These new APIs have been added *provisionally* as part of #666 so you can try them out today and provide feedback.
+ Learn more in the `API docs <https://www.attrs.org/en/stable/api.html>`_.
+ `#408 <https://github.com/python-attrs/attrs/issues/408>`_
+
+
+Changes
+^^^^^^^
+
+- Added ``attr.resolve_types()``.
+ It ensures that all forward-references and types in string form are resolved into concrete types.
+
+ You need this only if you need concrete types at runtime.
+ That means that if you only use types for static type checking, you do **not** need this function.
+ `#288 <https://github.com/python-attrs/attrs/issues/288>`_,
+ `#302 <https://github.com/python-attrs/attrs/issues/302>`_
+- Added the ``@attr.s(collect_by_mro=False)`` argument that, if set to ``True``, fixes the collection of attributes from base classes.
+
+ It's only necessary for certain cases of multiple-inheritance but is kept off for now for backward-compatibility reasons.
+ It will be turned on by default in the future.
+
+ As a side-effect, ``attr.Attribute`` now *always* has an ``inherited`` attribute indicating whether an attribute on a class was directly defined or inherited.
+ `#428 <https://github.com/python-attrs/attrs/issues/428>`_,
+ `#635 <https://github.com/python-attrs/attrs/issues/635>`_
+- On Python 3, all generated methods now have a docstring explaining that they have been created by ``attrs``.
+ `#506 <https://github.com/python-attrs/attrs/issues/506>`_
+- It is now possible to prevent ``attrs`` from auto-generating the ``__setstate__`` and ``__getstate__`` methods that are required for pickling of slotted classes.
+
+ Either pass ``@attr.s(getstate_setstate=False)`` or pass ``@attr.s(auto_detect=True)`` and implement them yourself:
+ if ``attrs`` finds either of the two methods directly on the decorated class, it assumes implicitly ``getstate_setstate=False`` (and implements neither).
+
+ This option works with dict classes but should never be necessary.
+ `#512 <https://github.com/python-attrs/attrs/issues/512>`_,
+ `#513 <https://github.com/python-attrs/attrs/issues/513>`_,
+ `#642 <https://github.com/python-attrs/attrs/issues/642>`_
+- Fixed a ``ValueError: Cell is empty`` bug that could happen in some rare edge cases.
+ `#590 <https://github.com/python-attrs/attrs/issues/590>`_
+- ``attrs`` can now automatically detect your own implementations and infer ``init=False``, ``repr=False``, ``eq=False``, ``order=False``, and ``hash=False`` if you set ``@attr.s(auto_detect=True)``.
+ ``attrs`` will ignore inherited methods.
+ If the argument implies more than one method (e.g. ``eq=True`` creates both ``__eq__`` and ``__ne__``), it's enough for *one* of them to exist and ``attrs`` will create *neither*.
+
+ This feature requires Python 3.
+ `#607 <https://github.com/python-attrs/attrs/issues/607>`_
+- Added ``attr.converters.pipe()``.
+  The feature allows combining multiple conversion callbacks into one by piping the value through all of them and returning the last result (see the sketch after this list).
+
+ As part of this feature, we had to relax the type information for converter callables.
+ `#618 <https://github.com/python-attrs/attrs/issues/618>`_
+- Fixed serialization behavior of non-slots classes with ``cache_hash=True``.
+ The hash cache will be cleared on operations which make "deep copies" of instances of classes with hash caching,
+ though the cache will not be cleared with shallow copies like those made by ``copy.copy()``.
+
+ Previously, ``copy.deepcopy()`` or serialization and deserialization with ``pickle`` would result in an un-initialized object.
+
+ This change also allows the creation of ``cache_hash=True`` classes with a custom ``__setstate__``,
+ which was previously forbidden (`#494 <https://github.com/python-attrs/attrs/issues/494>`_).
+ `#620 <https://github.com/python-attrs/attrs/issues/620>`_
+- It is now possible to specify hooks that are called whenever an attribute is set **after** a class has been instantiated.
+
+ You can pass ``on_setattr`` both to ``@attr.s()`` to set the default for all attributes on a class, and to ``@attr.ib()`` to overwrite it for individual attributes.
+
+ ``attrs`` also comes with a new module ``attr.setters`` that brings helpers that run validators, converters, or allow to freeze a subset of attributes.
+ `#645 <https://github.com/python-attrs/attrs/issues/645>`_,
+ `#660 <https://github.com/python-attrs/attrs/issues/660>`_
+- **Provisional** APIs called ``attr.define()``, ``attr.mutable()``, and ``attr.frozen()`` have been added.
+
+ They are only available on Python 3.6 and later, and call ``attr.s()`` with different default values.
+
+ If nothing comes up, they will become the official way for creating classes in 20.2.0 (see above).
+
+ **Please note** that it may take some time until mypy – and other tools that have dedicated support for ``attrs`` – recognize these new APIs.
+ Please **do not** open issues on our bug tracker, there is nothing we can do about it.
+ `#666 <https://github.com/python-attrs/attrs/issues/666>`_
+- We have also provisionally added ``attr.field()`` that supplants ``attr.ib()``.
+ It also requires at least Python 3.6 and is keyword-only.
+  Other than that, it only drops a few arguments and changes no defaults.
+
+ As with ``attr.s()``: ``attr.ib()`` is not going anywhere.
+ `#669 <https://github.com/python-attrs/attrs/issues/669>`_
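+
+The following sketch shows one way the new APIs can be combined; the class and attribute names are invented for illustration::
+
+   import attr
+
+   @attr.s(auto_attribs=True, on_setattr=attr.setters.validate)
+   class Connection:
+       # pipe() chains converters: the value is passed through each in turn.
+       host: str = attr.ib(
+           converter=attr.converters.pipe(str.strip, str.lower),
+           validator=attr.validators.instance_of(str),
+       )
+       # A per-attribute hook overrides the class-wide one; setters.frozen
+       # makes just this attribute read-only after initialization.
+       port: int = attr.ib(default=443, on_setattr=attr.setters.frozen)
+
+   # The provisional next-generation API expresses a class more tersely.
+   @attr.define
+   class Point:
+       x: int = attr.field(default=0)
+       y: int = attr.field(default=0)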
+
+
+----
+
+
+19.3.0 (2019-10-15)
+-------------------
+
+Changes
+^^^^^^^
+
+- Fixed ``auto_attribs`` usage when default values cannot be compared directly with ``==``, such as ``numpy`` arrays.
+ `#585 <https://github.com/python-attrs/attrs/issues/585>`_
+
+
+----
+
+
+19.2.0 (2019-10-01)
+-------------------
+
+Backward-incompatible Changes
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+- Removed deprecated ``Attribute`` attribute ``convert`` per scheduled removal on 2019/1.
+ This planned deprecation is tracked in issue `#307 <https://github.com/python-attrs/attrs/issues/307>`_.
+ `#504 <https://github.com/python-attrs/attrs/issues/504>`_
+- ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` do not consider subclasses comparable anymore.
+
+ This has been deprecated since 18.2.0 and was raising a ``DeprecationWarning`` for over a year.
+ `#570 <https://github.com/python-attrs/attrs/issues/570>`_
+
+
+Deprecations
+^^^^^^^^^^^^
+
+- The ``cmp`` argument to ``attr.s()`` and ``attr.ib()`` is now deprecated.
+
+ Please use ``eq`` to add equality methods (``__eq__`` and ``__ne__``) and ``order`` to add ordering methods (``__lt__``, ``__le__``, ``__gt__``, and ``__ge__``) instead – just like with `dataclasses <https://docs.python.org/3/library/dataclasses.html>`_.
+
+ Both are effectively ``True`` by default but it's enough to set ``eq=False`` to disable both at once.
+ Passing ``eq=False, order=True`` explicitly will raise a ``ValueError`` though.
+
+ Since this is arguably a deeper backward-compatibility break, it will have an extended deprecation period until 2021-06-01.
+ After that day, the ``cmp`` argument will be removed.
+
+ ``attr.Attribute`` also isn't orderable anymore.
+ `#574 <https://github.com/python-attrs/attrs/issues/574>`_
+
+
+Changes
+^^^^^^^
+
+- Updated ``attr.validators.__all__`` to include new validators added in `#425`_.
+ `#517 <https://github.com/python-attrs/attrs/issues/517>`_
+- Slotted classes now use a pure Python mechanism to rewrite the ``__class__`` cell when rebuilding the class, so ``super()`` works even on environments where ``ctypes`` is not installed.
+ `#522 <https://github.com/python-attrs/attrs/issues/522>`_
+- When collecting attributes using ``@attr.s(auto_attribs=True)``, attributes with a default of ``None`` are now deleted too.
+ `#523 <https://github.com/python-attrs/attrs/issues/523>`_,
+ `#556 <https://github.com/python-attrs/attrs/issues/556>`_
+- Fixed ``attr.validators.deep_iterable()`` and ``attr.validators.deep_mapping()`` type stubs.
+ `#533 <https://github.com/python-attrs/attrs/issues/533>`_
+- ``attr.validators.is_callable()`` validator now raises an exception, ``attr.exceptions.NotCallableError`` (a subclass of ``TypeError``), that reports the received value.
+ `#536 <https://github.com/python-attrs/attrs/issues/536>`_
+- ``@attr.s(auto_exc=True)`` now generates classes that are hashable by ID, as the documentation always claimed it would.
+ `#543 <https://github.com/python-attrs/attrs/issues/543>`_,
+ `#563 <https://github.com/python-attrs/attrs/issues/563>`_
+- Added ``attr.validators.matches_re()`` that checks whether string attributes match a regular expression (see the usage sketch after this list).
+ `#552 <https://github.com/python-attrs/attrs/issues/552>`_
+- Keyword-only attributes (``kw_only=True``) and attributes that are excluded from the ``attrs``-generated ``__init__`` (``init=False``) can now appear before mandatory attributes.
+ `#559 <https://github.com/python-attrs/attrs/issues/559>`_
+- The fake filename for generated methods is now more stable.
+ It won't change when you restart the process.
+ `#560 <https://github.com/python-attrs/attrs/issues/560>`_
+- The value passed to ``@attr.ib(repr=…)`` can now be either a boolean (as before) or a callable.
+ That callable must return a string and is then used for formatting the attribute by the generated ``__repr__()`` method.
+ `#568 <https://github.com/python-attrs/attrs/issues/568>`_
+- Added ``attr.__version_info__`` that can be used to reliably check the version of ``attrs`` and write forward- and backward-compatible code.
+ Please check out the `section on deprecated APIs <http://www.attrs.org/en/stable/api.html#deprecated-apis>`_ on how to use it.
+ `#580 <https://github.com/python-attrs/attrs/issues/580>`_
+
+ .. _`#425`: https://github.com/python-attrs/attrs/issues/425
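+
+A short sketch of the new validator, the callable *repr*, and ``__version_info__``; the class below is purely illustrative::
+
+   import attr
+
+   @attr.s
+   class User:
+       # The value must match the regular expression to pass validation.
+       email = attr.ib(validator=attr.validators.matches_re(r".+@.+"))
+       # A callable repr must return a string; here it masks the value.
+       password = attr.ib(repr=lambda value: "***")
+
+   # __version_info__ can be compared against plain tuples.
+   if attr.__version_info__ >= (19, 2):
+       pass  # safe to rely on the features above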
+
+
+----
+
+
+19.1.0 (2019-03-03)
+-------------------
+
+Backward-incompatible Changes
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+- Fixed a bug where deserialized objects with ``cache_hash=True`` could have incorrect hash code values.
+ This change breaks classes with ``cache_hash=True`` when a custom ``__setstate__`` is present.
+ An exception will be thrown when applying the ``attrs`` annotation to such a class.
+ This limitation is tracked in issue `#494 <https://github.com/python-attrs/attrs/issues/494>`_.
+ `#482 <https://github.com/python-attrs/attrs/issues/482>`_
+
+
+Changes
+^^^^^^^
+
+- Add ``is_callable``, ``deep_iterable``, and ``deep_mapping`` validators.
+
+ * ``is_callable``: validates that a value is callable
+ * ``deep_iterable``: Allows recursion down into an iterable,
+ applying another validator to every member in the iterable
+ as well as applying an optional validator to the iterable itself.
+ * ``deep_mapping``: Allows recursion down into the items in a mapping object,
+ applying a key validator and a value validator to the key and value in every item.
+ Also applies an optional validator to the mapping object itself.
+
+  You can find them in the ``attr.validators`` package; a usage sketch follows this list.
+ `#425`_
+- Fixed stub files to prevent errors raised by mypy's ``disallow_any_generics = True`` option.
+ `#443 <https://github.com/python-attrs/attrs/issues/443>`_
+- Attributes with ``init=False`` now can follow after ``kw_only=True`` attributes.
+ `#450 <https://github.com/python-attrs/attrs/issues/450>`_
+- ``attrs`` now has first class support for defining exception classes.
+
+  If you define a class using ``@attr.s(auto_exc=True)`` and subclass an exception, the class will behave like a well-behaved exception class, including an appropriate ``__str__`` method, with all attributes additionally available in an ``args`` attribute.
+ `#500 <https://github.com/python-attrs/attrs/issues/500>`_
+- Clarified documentation for hashing to warn that hashable objects should be deeply immutable (in their usage, even if this is not enforced).
+ `#503 <https://github.com/python-attrs/attrs/issues/503>`_
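+
+A usage sketch for the new validators and ``auto_exc=True``; the names are invented for the example::
+
+   import attr
+   from attr.validators import deep_iterable, instance_of, is_callable
+
+   @attr.s
+   class Job:
+       # Every member must be a string and the container itself a list.
+       tags = attr.ib(
+           validator=deep_iterable(
+               member_validator=instance_of(str),
+               iterable_validator=instance_of(list),
+           )
+       )
+       callback = attr.ib(validator=is_callable())
+
+   # auto_exc=True turns the class into a well-behaved exception class.
+   @attr.s(auto_exc=True)
+   class JobFailed(Exception):
+       reason = attr.ib()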
+
+
+----
+
+
+18.2.0 (2018-09-01)
+-------------------
+
+Deprecations
+^^^^^^^^^^^^
+
+- Comparing subclasses using ``<``, ``>``, ``<=``, and ``>=`` is now deprecated.
+ The docs always claimed that instances are only compared if the types are identical, so this is a first step to conform to the docs.
+
+ Equality operators (``==`` and ``!=``) were always strict in this regard.
+ `#394 <https://github.com/python-attrs/attrs/issues/394>`_
+
+
+Changes
+^^^^^^^
+
+- ``attrs`` now ships its own `PEP 484 <https://www.python.org/dev/peps/pep-0484/>`_ type hints.
+ Together with `mypy <http://mypy-lang.org>`_'s ``attrs`` plugin, you've got all you need for writing statically typed code in both Python 2 and 3!
+
+ At that occasion, we've also added `narrative docs <https://www.attrs.org/en/stable/types.html>`_ about type annotations in ``attrs``.
+ `#238 <https://github.com/python-attrs/attrs/issues/238>`_
+- Added *kw_only* arguments to ``attr.ib`` and ``attr.s``, and a corresponding *kw_only* attribute to ``attr.Attribute``.
+ This change makes it possible to have a generated ``__init__`` with keyword-only arguments on Python 3, relaxing the required ordering of default and non-default valued attributes.
+ `#281 <https://github.com/python-attrs/attrs/issues/281>`_,
+ `#411 <https://github.com/python-attrs/attrs/issues/411>`_
+- The test suite now runs with ``hypothesis.HealthCheck.too_slow`` disabled to prevent CI breakage on slower computers.
+ `#364 <https://github.com/python-attrs/attrs/issues/364>`_,
+ `#396 <https://github.com/python-attrs/attrs/issues/396>`_
+- ``attr.validators.in_()`` now raises a ``ValueError`` with a useful message even if the options are a string and the value is not a string.
+ `#383 <https://github.com/python-attrs/attrs/issues/383>`_
+- ``attr.asdict()`` now properly handles deeply nested lists and dictionaries.
+ `#395 <https://github.com/python-attrs/attrs/issues/395>`_
+- Added ``attr.converters.default_if_none()`` that allows replacing ``None`` values in attributes.
+  For example ``attr.ib(converter=default_if_none(""))`` replaces ``None`` with the empty string (see the sketch after this list).
+ `#400 <https://github.com/python-attrs/attrs/issues/400>`_,
+ `#414 <https://github.com/python-attrs/attrs/issues/414>`_
+- Fixed a reference leak where the original class would remain live after being replaced when ``slots=True`` is set.
+ `#407 <https://github.com/python-attrs/attrs/issues/407>`_
+- Slotted classes can now be made weakly referenceable by passing ``@attr.s(weakref_slot=True)``.
+ `#420 <https://github.com/python-attrs/attrs/issues/420>`_
+- Added *cache_hash* option to ``@attr.s`` which causes the hash code to be computed once and stored on the object.
+ `#426 <https://github.com/python-attrs/attrs/issues/426>`_
+- Attributes can be named ``property`` and ``itemgetter`` now.
+ `#430 <https://github.com/python-attrs/attrs/issues/430>`_
+- It is now possible to override a base class' class variable using only class annotations.
+ `#431 <https://github.com/python-attrs/attrs/issues/431>`_
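+
+A minimal sketch of ``default_if_none()`` together with *kw_only*; the class is invented for the example::
+
+   import attr
+   from attr.converters import default_if_none
+
+   @attr.s
+   class Report:
+       # A None passed by the caller is replaced with the empty string.
+       title = attr.ib(converter=default_if_none(""), default=None)
+       # Keyword-only attributes may follow attributes with defaults.
+       author = attr.ib(kw_only=True)
+
+   r = Report(author="jane")  # r.title == "" because None was converted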
+
+
+----
+
+
+18.1.0 (2018-05-03)
+-------------------
+
+Changes
+^^^^^^^
+
+- ``x=X(); x.cycle = x; repr(x)`` will no longer raise a ``RecursionError``, and will instead show as ``X(x=...)``.
+
+ `#95 <https://github.com/python-attrs/attrs/issues/95>`_
+- ``attr.ib(factory=f)`` is now syntactic sugar for the common case of ``attr.ib(default=attr.Factory(f))``.
+
+ `#178 <https://github.com/python-attrs/attrs/issues/178>`_,
+ `#356 <https://github.com/python-attrs/attrs/issues/356>`_
+- Added ``attr.fields_dict()`` that returns an ordered dictionary of ``attrs`` attributes for a class, whose keys are the attribute names (see the sketch after this list).
+
+ `#290 <https://github.com/python-attrs/attrs/issues/290>`_,
+ `#349 <https://github.com/python-attrs/attrs/issues/349>`_
+- The order of attributes that are passed into ``attr.make_class()`` or the *these* argument of ``@attr.s()`` is now retained if the dictionary is ordered (i.e. ``dict`` on Python 3.6 and later, ``collections.OrderedDict`` otherwise).
+
+  Before, the order was always determined by the order in which the attributes were defined, which may not be desirable when creating classes programmatically.
+
+ `#300 <https://github.com/python-attrs/attrs/issues/300>`_,
+ `#339 <https://github.com/python-attrs/attrs/issues/339>`_,
+ `#343 <https://github.com/python-attrs/attrs/issues/343>`_
+- In slotted classes, ``__getstate__`` and ``__setstate__`` now ignore the ``__weakref__`` attribute.
+
+ `#311 <https://github.com/python-attrs/attrs/issues/311>`_,
+ `#326 <https://github.com/python-attrs/attrs/issues/326>`_
+- Setting the cell type is now completely best effort.
+ This fixes ``attrs`` on Jython.
+
+  We cannot make any guarantees regarding Jython though, because our test suite cannot run due to dependency incompatibilities.
+
+ `#321 <https://github.com/python-attrs/attrs/issues/321>`_,
+ `#334 <https://github.com/python-attrs/attrs/issues/334>`_
+- If ``attr.s`` is passed a *these* argument, it will no longer attempt to remove attributes with the same name from the class body.
+
+ `#322 <https://github.com/python-attrs/attrs/issues/322>`_,
+ `#323 <https://github.com/python-attrs/attrs/issues/323>`_
+- The hash of ``attr.NOTHING`` is now vegan and faster on 32bit Python builds.
+
+ `#331 <https://github.com/python-attrs/attrs/issues/331>`_,
+ `#332 <https://github.com/python-attrs/attrs/issues/332>`_
+- The overhead of instantiating frozen dict classes is virtually eliminated.
+ `#336 <https://github.com/python-attrs/attrs/issues/336>`_
+- Generated ``__init__`` methods now have an ``__annotations__`` attribute derived from the types of the fields.
+
+ `#363 <https://github.com/python-attrs/attrs/issues/363>`_
+- We have restructured the documentation a bit to account for ``attrs``' growth in scope.
+ Instead of putting everything into the `examples <https://www.attrs.org/en/stable/examples.html>`_ page, we have started to extract narrative chapters.
+
+ So far, we've added chapters on `initialization <https://www.attrs.org/en/stable/init.html>`_ and `hashing <https://www.attrs.org/en/stable/hashing.html>`_.
+
+ Expect more to come!
+
+ `#369 <https://github.com/python-attrs/attrs/issues/369>`_,
+ `#370 <https://github.com/python-attrs/attrs/issues/370>`_
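+
+A brief sketch of the *factory* shorthand and ``attr.fields_dict()``; the class is invented for the example::
+
+   import attr
+
+   @attr.s
+   class Basket:
+       # Shorthand for attr.ib(default=attr.Factory(list)).
+       items = attr.ib(factory=list)
+
+   # fields_dict() maps attribute names to their Attribute instances.
+   attr.fields_dict(Basket)["items"].name  # -> "items"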
+
+
+----
+
+
+17.4.0 (2017-12-30)
+-------------------
+
+Backward-incompatible Changes
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+- The traversal of MROs when using multiple inheritance was backward:
+ If you defined a class ``C`` that subclasses ``A`` and ``B`` like ``C(A, B)``, ``attrs`` would have collected the attributes from ``B`` *before* those of ``A``.
+
+ This is now fixed and means that in classes that employ multiple inheritance, the output of ``__repr__`` and the order of positional arguments in ``__init__`` changes.
+ Because of the nature of this bug, a proper deprecation cycle was unfortunately impossible.
+
+ Generally speaking, it's advisable to prefer ``kwargs``-based initialization anyways – *especially* if you employ multiple inheritance and diamond-shaped hierarchies.
+
+ `#298 <https://github.com/python-attrs/attrs/issues/298>`_,
+ `#299 <https://github.com/python-attrs/attrs/issues/299>`_,
+ `#304 <https://github.com/python-attrs/attrs/issues/304>`_
+- The ``__repr__`` set by ``attrs`` no longer produces an ``AttributeError`` when the instance is missing some of the specified attributes (either through deleting or after using ``init=False`` on some attributes).
+
+ This can break code that relied on ``repr(attr_cls_instance)`` raising ``AttributeError`` to check if any ``attrs``-specified members were unset.
+
+ If you were using this, you can implement a custom method for checking this::
+
+ def has_unset_members(self):
+ for field in attr.fields(type(self)):
+ try:
+ getattr(self, field.name)
+ except AttributeError:
+ return True
+ return False
+
+ `#308 <https://github.com/python-attrs/attrs/issues/308>`_
+
+
+Deprecations
+^^^^^^^^^^^^
+
+- The ``attr.ib(convert=callable)`` option is now deprecated in favor of ``attr.ib(converter=callable)``.
+
+ This is done to achieve consistency with other noun-based arguments like *validator*.
+
+ *convert* will keep working until at least January 2019 while raising a ``DeprecationWarning``.
+
+ `#307 <https://github.com/python-attrs/attrs/issues/307>`_
+
+
+Changes
+^^^^^^^
+
+- Generated ``__hash__`` methods now hash the class type along with the attribute values.
+  Until now, the hashes of instances of two different classes with the same attribute values were identical, which was a bug.
+
+ The generated method is also *much* faster now.
+
+ `#261 <https://github.com/python-attrs/attrs/issues/261>`_,
+ `#295 <https://github.com/python-attrs/attrs/issues/295>`_,
+ `#296 <https://github.com/python-attrs/attrs/issues/296>`_
+- ``attr.ib``\ ’s *metadata* argument now defaults to a unique empty ``dict`` instance instead of sharing a common empty ``dict`` for all.
+ The singleton empty ``dict`` is still enforced.
+
+ `#280 <https://github.com/python-attrs/attrs/issues/280>`_
+- ``ctypes`` is now optional; however, if it is missing, a bare ``super()`` will not work in slotted classes.
+ This should only happen in special environments like Google App Engine.
+
+ `#284 <https://github.com/python-attrs/attrs/issues/284>`_,
+ `#286 <https://github.com/python-attrs/attrs/issues/286>`_
+- The attribute redefinition feature introduced in 17.3.0 now takes into account if an attribute is redefined via multiple inheritance.
+ In that case, the definition that is closer to the base of the class hierarchy wins.
+
+ `#285 <https://github.com/python-attrs/attrs/issues/285>`_,
+ `#287 <https://github.com/python-attrs/attrs/issues/287>`_
+- Subclasses of classes using ``auto_attribs=True`` can now be empty.
+
+ `#291 <https://github.com/python-attrs/attrs/issues/291>`_,
+ `#292 <https://github.com/python-attrs/attrs/issues/292>`_
+- Equality tests are *much* faster now.
+
+ `#306 <https://github.com/python-attrs/attrs/issues/306>`_
+- All generated methods now have correct ``__module__``, ``__name__``, and (on Python 3) ``__qualname__`` attributes.
+
+ `#309 <https://github.com/python-attrs/attrs/issues/309>`_
+
+
+----
+
+
+17.3.0 (2017-11-08)
+-------------------
+
+Backward-incompatible Changes
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+- Attributes are no longer defined on the class body.
+
+ This means that if you define a class ``C`` with an attribute ``x``, the class will *not* have an attribute ``x`` for introspection.
+ Instead of ``C.x``, use ``attr.fields(C).x`` or look at ``C.__attrs_attrs__``.
+ The old behavior has been deprecated since version 16.1.
+ (`#253 <https://github.com/python-attrs/attrs/issues/253>`_)
+
+
+Changes
+^^^^^^^
+
+- ``super()`` and ``__class__`` now work with slotted classes on Python 3.
+ (`#102 <https://github.com/python-attrs/attrs/issues/102>`_, `#226 <https://github.com/python-attrs/attrs/issues/226>`_, `#269 <https://github.com/python-attrs/attrs/issues/269>`_, `#270 <https://github.com/python-attrs/attrs/issues/270>`_, `#272 <https://github.com/python-attrs/attrs/issues/272>`_)
+- Added *type* argument to ``attr.ib()`` and corresponding ``type`` attribute to ``attr.Attribute``.
+
+ This change paves the way for automatic type checking and serialization (though as of this release ``attrs`` does not make use of it).
+ In Python 3.6 or higher, the value of ``attr.Attribute.type`` can alternately be set using variable type annotations
+ (see `PEP 526 <https://www.python.org/dev/peps/pep-0526/>`_).
+ (`#151 <https://github.com/python-attrs/attrs/issues/151>`_, `#214 <https://github.com/python-attrs/attrs/issues/214>`_, `#215 <https://github.com/python-attrs/attrs/issues/215>`_, `#239 <https://github.com/python-attrs/attrs/issues/239>`_)
+- The combination of ``str=True`` and ``slots=True`` now works on Python 2.
+ (`#198 <https://github.com/python-attrs/attrs/issues/198>`_)
+- ``attr.Factory`` is hashable again.
+ (`#204 <https://github.com/python-attrs/attrs/issues/204>`_)
+- Subclasses now can overwrite attribute definitions of their base classes.
+
+ That means that you can -- for example -- change the default value for an attribute by redefining it.
+ (`#221 <https://github.com/python-attrs/attrs/issues/221>`_, `#229 <https://github.com/python-attrs/attrs/issues/229>`_)
+- Added new option *auto_attribs* to ``@attr.s`` that allows collecting annotated fields without setting them to ``attr.ib()`` (see the sketch after this list).
+
+  Setting a field to an ``attr.ib()`` is still possible in order to supply options like validators.
+ Setting it to any other value is treated like it was passed as ``attr.ib(default=value)`` -- passing an instance of ``attr.Factory`` also works as expected.
+ (`#262 <https://github.com/python-attrs/attrs/issues/262>`_, `#277 <https://github.com/python-attrs/attrs/issues/277>`_)
+- Instances of classes created using ``attr.make_class()`` can now be pickled.
+ (`#282 <https://github.com/python-attrs/attrs/issues/282>`_)
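+
+A small sketch of *auto_attribs* in action; the class is invented for the example::
+
+   import attr
+
+   @attr.s(auto_attribs=True)
+   class Coordinates:
+       x: int                  # collected from the annotation alone
+       y: int = 0              # a plain value becomes the default
+       label: str = attr.ib(default="origin", repr=False)  # full attr.ib() still works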
+
+
+----
+
+
+17.2.0 (2017-05-24)
+-------------------
+
+
+Changes:
+^^^^^^^^
+
+- Validators are hashable again.
+ Note that validators may become frozen in the future, pending availability of no-overhead frozen classes.
+ `#192 <https://github.com/python-attrs/attrs/issues/192>`_
+
+
+----
+
+
+17.1.0 (2017-05-16)
+-------------------
+
+To encourage more participation, the project has also been moved into a `dedicated GitHub organization <https://github.com/python-attrs/>`_ and everyone is most welcome to join!
+
+``attrs`` also has a logo now!
+
+.. image:: https://www.attrs.org/en/latest/_static/attrs_logo.png
+ :alt: attrs logo
+
+
+Backward-incompatible Changes:
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+- ``attrs`` will set the ``__hash__()`` method to ``None`` by default now.
+ The way hashes were handled before was in conflict with `Python's specification <https://docs.python.org/3/reference/datamodel.html#object.__hash__>`_.
+ This *may* break some software although this breakage is most likely just surfacing of latent bugs.
+ You can always make ``attrs`` create the ``__hash__()`` method using ``@attr.s(hash=True)``.
+ See `#136`_ for the rationale of this change.
+
+ .. warning::
+
+ Please *do not* upgrade blindly and *do* test your software!
+ *Especially* if you use instances as dict keys or put them into sets!
+
+- Correspondingly, ``attr.ib``'s *hash* argument is ``None`` by default too and mirrors the *cmp* argument as it should.
+
+
+Deprecations:
+^^^^^^^^^^^^^
+
+- ``attr.assoc()`` is now deprecated in favor of ``attr.evolve()`` and will stop working in 2018.
+
+
+Changes:
+^^^^^^^^
+
+- Fix default hashing behavior.
+ Now *hash* mirrors the value of *cmp* and classes are unhashable by default.
+ `#136`_
+ `#142 <https://github.com/python-attrs/attrs/issues/142>`_
+- Added ``attr.evolve()`` that, given an instance of an ``attrs`` class and field changes as keyword arguments, will instantiate a copy of the given instance with the changes applied.
+ ``evolve()`` replaces ``assoc()``, which is now deprecated.
+ ``evolve()`` is significantly faster than ``assoc()``, and requires the class have an initializer that can take the field values as keyword arguments (like ``attrs`` itself can generate).
+ `#116 <https://github.com/python-attrs/attrs/issues/116>`_
+ `#124 <https://github.com/python-attrs/attrs/pull/124>`_
+ `#135 <https://github.com/python-attrs/attrs/pull/135>`_
+- ``FrozenInstanceError`` is now raised when trying to delete an attribute from a frozen class.
+ `#118 <https://github.com/python-attrs/attrs/pull/118>`_
+- Frozen-ness of classes is now inherited.
+ `#128 <https://github.com/python-attrs/attrs/pull/128>`_
+- ``__attrs_post_init__()`` is now run if validation is disabled.
+ `#130 <https://github.com/python-attrs/attrs/pull/130>`_
+- Added ``attr.validators.in_(options)`` that, given the allowed ``options``, checks whether the attribute value is among them.
+  This can be used to check constants, enums, mappings, etc. (see the sketch after this list).
+ `#181 <https://github.com/python-attrs/attrs/pull/181>`_
+- Added ``attr.validators.and_()`` that composes multiple validators into one.
+ `#161 <https://github.com/python-attrs/attrs/issues/161>`_
+- For convenience, the *validator* argument of ``@attr.s`` now can take a list of validators that are wrapped using ``and_()``.
+ `#138 <https://github.com/python-attrs/attrs/issues/138>`_
+- Accordingly, ``attr.validators.optional()`` now can take a list of validators too.
+ `#161 <https://github.com/python-attrs/attrs/issues/161>`_
+- Validators can now be defined conveniently inline by using the attribute as a decorator.
+ Check out the `validator examples <http://www.attrs.org/en/stable/init.html#decorator>`_ to see it in action!
+ `#143 <https://github.com/python-attrs/attrs/issues/143>`_
+- ``attr.Factory()`` now has a *takes_self* argument that makes the initializer pass the partially initialized instance into the factory.
+  In other words, you can define attribute defaults based on other attributes.
+ `#165`_
+ `#189 <https://github.com/python-attrs/attrs/issues/189>`_
+- Default factories can now also be defined inline using decorators.
+ They are *always* passed the partially initialized instance.
+ `#165`_
+- Conversion can now be made optional using ``attr.converters.optional()``.
+ `#105 <https://github.com/python-attrs/attrs/issues/105>`_
+ `#173 <https://github.com/python-attrs/attrs/pull/173>`_
+- ``attr.make_class()`` now accepts the keyword argument ``bases`` which allows for subclassing.
+ `#152 <https://github.com/python-attrs/attrs/pull/152>`_
+- Metaclasses are now preserved with ``slots=True``.
+ `#155 <https://github.com/python-attrs/attrs/pull/155>`_
+
+.. _`#136`: https://github.com/python-attrs/attrs/issues/136
+.. _`#165`: https://github.com/python-attrs/attrs/issues/165
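+
+A combined sketch of ``in_()``, *takes_self*, and ``evolve()``; the class is invented for the example::
+
+   import attr
+
+   @attr.s
+   class Order:
+       state = attr.ib(validator=attr.validators.in_(["new", "paid", "shipped"]))
+       # takes_self=True hands the partially initialized instance to the factory.
+       summary = attr.ib(
+           default=attr.Factory(lambda self: "order is " + self.state, takes_self=True)
+       )
+
+   # evolve() creates a copy with the given fields changed.
+   o = attr.evolve(Order(state="new"), state="paid")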
+
+
+----
+
+
+16.3.0 (2016-11-24)
+-------------------
+
+Changes:
+^^^^^^^^
+
+- Attributes now can have user-defined metadata which greatly improves ``attrs``'s extensibility.
+ `#96 <https://github.com/python-attrs/attrs/pull/96>`_
+- Allow for a ``__attrs_post_init__()`` method that -- if defined -- will get called at the end of the ``attrs``-generated ``__init__()`` method.
+ `#111 <https://github.com/python-attrs/attrs/pull/111>`_
+- Added ``@attr.s(str=True)`` that will optionally create a ``__str__()`` method that is identical to ``__repr__()``.
+  This is mainly useful with ``Exception``\ s and other classes that rely on a useful ``__str__()`` implementation but overwrite the default one with a poor one of their own.
+  Default Python class behavior is to use ``__repr__()`` as ``__str__()`` anyway.
+
+  If you tried using ``attrs`` with ``Exception``\ s and were puzzled by the tracebacks: this option is for you (see the sketch after this list).
+- ``__name__`` is no longer overwritten with ``__qualname__`` for ``attr.s(slots=True)`` classes.
+ `#99 <https://github.com/python-attrs/attrs/issues/99>`_
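+
+A small sketch of metadata, ``__attrs_post_init__()``, and ``str=True``; the class is invented for the example::
+
+   import attr
+
+   @attr.s(str=True)
+   class Measurement:
+       value = attr.ib(metadata={"unit": "cm"})  # arbitrary user metadata
+       doubled = attr.ib(init=False, default=None)
+
+       def __attrs_post_init__(self):
+           # Runs at the very end of the generated __init__().
+           self.doubled = self.value * 2
+
+   attr.fields(Measurement).value.metadata["unit"]  # -> "cm"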
+
+
+----
+
+
+16.2.0 (2016-09-17)
+-------------------
+
+Changes:
+^^^^^^^^
+
+- Added ``attr.astuple()`` that -- similarly to ``attr.asdict()`` -- returns the instance as a tuple.
+ `#77 <https://github.com/python-attrs/attrs/issues/77>`_
+- Converters now work with frozen classes.
+ `#76 <https://github.com/python-attrs/attrs/issues/76>`_
+- Instantiation of ``attrs`` classes with converters is now significantly faster.
+ `#80 <https://github.com/python-attrs/attrs/pull/80>`_
+- Pickling now works with slotted classes.
+ `#81 <https://github.com/python-attrs/attrs/issues/81>`_
+- ``attr.assoc()`` now works with slotted classes.
+ `#84 <https://github.com/python-attrs/attrs/issues/84>`_
+- The tuple returned by ``attr.fields()`` now also allows accessing the ``Attribute`` instances by name.
+  Yes, we've subclassed ``tuple`` so you don't have to!
+  Therefore ``attr.fields(C).x`` is equivalent to the deprecated ``C.x`` and works with slotted classes (see the sketch after this list).
+ `#88 <https://github.com/python-attrs/attrs/issues/88>`_
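+
+A tiny sketch of ``astuple()`` and attribute access by name on ``fields()``; the class is invented for the example::
+
+   import attr
+
+   @attr.s
+   class Pair:
+       first = attr.ib()
+       second = attr.ib()
+
+   attr.astuple(Pair(1, 2))   # -> (1, 2)
+   attr.fields(Pair).first    # the Attribute instance named "first"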
+
+
+----
+
+
+16.1.0 (2016-08-30)
+-------------------
+
+Backward-incompatible Changes:
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+- All instances where function arguments were called ``cl`` have been changed to the more Pythonic ``cls``.
+  Since it was always the first argument, it's doubtful anyone ever called those functions using the keyword form.
+ If so, sorry for any breakage but there's no practical deprecation path to solve this ugly wart.
+
+
+Deprecations:
+^^^^^^^^^^^^^
+
+- Accessing ``Attribute`` instances on class objects is now deprecated and will stop working in 2017.
+ If you need introspection please use the ``__attrs_attrs__`` attribute or the ``attr.fields()`` function that carry them too.
+  In the future, the attributes that are defined on the class body and are usually overwritten in your ``__init__`` method will simply be removed after ``@attr.s`` has been applied.
+
+ This will remove the confusing error message if you write your own ``__init__`` and forget to initialize some attribute.
+ Instead you will get a straightforward ``AttributeError``.
+ In other words: decorated classes will work more like plain Python classes which was always ``attrs``'s goal.
+- The serious business aliases ``attr.attributes`` and ``attr.attr`` have been deprecated in favor of ``attr.attrs`` and ``attr.attrib`` which are much more consistent and frankly obvious in hindsight.
+ They will be purged from documentation immediately but there are no plans to actually remove them.
+
+
+Changes:
+^^^^^^^^
+
+- ``attr.asdict()``\ 's ``dict_factory`` argument is now propagated on recursion.
+ `#45 <https://github.com/python-attrs/attrs/issues/45>`_
+- ``attr.asdict()``, ``attr.has()`` and ``attr.fields()`` are significantly faster.
+ `#48 <https://github.com/python-attrs/attrs/issues/48>`_
+ `#51 <https://github.com/python-attrs/attrs/issues/51>`_
+- Add ``attr.attrs`` and ``attr.attrib`` as more consistent aliases for ``attr.s`` and ``attr.ib``.
+- Add *frozen* option to ``attr.s`` that will make instances best-effort immutable (see the sketch after this list).
+ `#60 <https://github.com/python-attrs/attrs/issues/60>`_
+- ``attr.asdict()`` now takes ``retain_collection_types`` as an argument.
+ If ``True``, it does not convert attributes of type ``tuple`` or ``set`` to ``list``.
+ `#69 <https://github.com/python-attrs/attrs/issues/69>`_
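+
+A short sketch of *frozen* and *retain_collection_types*; the class is invented for the example::
+
+   import attr
+
+   @attr.s(frozen=True)
+   class Snapshot:
+       values = attr.ib()
+
+   s = Snapshot(values=(1, 2, 3))
+   # Without the flag, the tuple would come back as a list.
+   attr.asdict(s, retain_collection_types=True)  # -> {'values': (1, 2, 3)}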
+
+
+----
+
+
+16.0.0 (2016-05-23)
+-------------------
+
+Backward-incompatible Changes:
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+- Python 3.3 and 2.6 are no longer supported.
+ They may work by chance but any effort to keep them working has ceased.
+
+ The last Python 2.6 release was on October 29, 2013 and is no longer supported by the CPython core team.
+ Major Python packages like Django and Twisted dropped Python 2.6 a while ago already.
+
+ Python 3.3 never had a significant user base and wasn't part of any distribution's LTS release.
+
+Changes:
+^^^^^^^^
+
+- ``__slots__`` have arrived!
+ Classes now can automatically be `slotted <https://docs.python.org/3/reference/datamodel.html#slots>`_-style (and save your precious memory) just by passing ``slots=True``.
+ `#35 <https://github.com/python-attrs/attrs/issues/35>`_
+- Allow the case of initializing attributes that are set to ``init=False``.
+ This allows for clean initializer parameter lists while being able to initialize attributes to default values.
+ `#32 <https://github.com/python-attrs/attrs/issues/32>`_
+- ``attr.asdict()`` can now produce arbitrary mappings instead of Python ``dict``\ s when provided with a ``dict_factory`` argument (see the sketch after this list).
+ `#40 <https://github.com/python-attrs/attrs/issues/40>`_
+- Multiple performance improvements.
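+
+A short sketch of *slots*, ``init=False`` defaults, and *dict_factory*; the class is invented for the example::
+
+   import collections
+
+   import attr
+
+   @attr.s(slots=True)
+   class Pixel:
+       x = attr.ib()
+       y = attr.ib()
+       alpha = attr.ib(init=False, default=255)  # not an __init__ argument
+
+   attr.asdict(Pixel(1, 2), dict_factory=collections.OrderedDict)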
+
+
+----
+
+
+15.2.0 (2015-12-08)
+-------------------
+
+Changes:
+^^^^^^^^
+
+- Added a ``convert`` argument to ``attr.ib``, which allows specifying a function to run on arguments.
+ This allows for simple type conversions, e.g. with ``attr.ib(convert=int)``.
+ `#26 <https://github.com/python-attrs/attrs/issues/26>`_
+- Speed up object creation when attribute validators are used.
+ `#28 <https://github.com/python-attrs/attrs/issues/28>`_
+
+
+----
+
+
+15.1.0 (2015-08-20)
+-------------------
+
+Changes:
+^^^^^^^^
+
+- Added ``attr.validators.optional()`` that wraps other validators allowing attributes to be ``None``.
+ `#16 <https://github.com/python-attrs/attrs/issues/16>`_
+- Multi-level inheritance now works.
+ `#24 <https://github.com/python-attrs/attrs/issues/24>`_
+- ``__repr__()`` now works with non-redecorated subclasses.
+ `#20 <https://github.com/python-attrs/attrs/issues/20>`_
+
+
+----
+
+
+15.0.0 (2015-04-15)
+-------------------
+
+Changes:
+^^^^^^^^
+
+Initial release.
diff --git a/testing/web-platform/tests/tools/third_party/attrs/LICENSE b/testing/web-platform/tests/tools/third_party/attrs/LICENSE
new file mode 100644
index 0000000000..7ae3df9309
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/LICENSE
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2015 Hynek Schlawack
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/testing/web-platform/tests/tools/third_party/attrs/MANIFEST.in b/testing/web-platform/tests/tools/third_party/attrs/MANIFEST.in
new file mode 100644
index 0000000000..3d68bf9c5d
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/MANIFEST.in
@@ -0,0 +1,24 @@
+include LICENSE *.rst *.toml *.yml *.yaml *.ini
+graft .github
+
+# Stubs
+recursive-include src *.pyi
+recursive-include src py.typed
+
+# Tests
+include tox.ini conftest.py
+recursive-include tests *.py
+recursive-include tests *.yml
+
+# Documentation
+include docs/Makefile docs/docutils.conf
+recursive-include docs *.png
+recursive-include docs *.svg
+recursive-include docs *.py
+recursive-include docs *.rst
+prune docs/_build
+
+# Just to keep check-manifest happy; on releases those files are gone.
+# Last rule wins!
+exclude changelog.d/*.rst
+include changelog.d/towncrier_template.rst
diff --git a/testing/web-platform/tests/tools/third_party/attrs/README.rst b/testing/web-platform/tests/tools/third_party/attrs/README.rst
new file mode 100644
index 0000000000..709bba83d7
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/README.rst
@@ -0,0 +1,135 @@
+.. raw:: html
+
+ <p align="center">
+ <a href="https://www.attrs.org/">
+ <img src="./docs/_static/attrs_logo.svg" width="35%" alt="attrs" />
+ </a>
+ </p>
+ <p align="center">
+ <a href="https://www.attrs.org/en/stable/?badge=stable">
+ <img src="https://img.shields.io/badge/Docs-Read%20The%20Docs-black" alt="Documentation" />
+ </a>
+ <a href="https://github.com/python-attrs/attrs/blob/main/LICENSE">
+ <img src="https://img.shields.io/badge/license-MIT-C06524" alt="License: MIT" />
+ </a>
+ <a href="https://pypi.org/project/attrs/">
+ <img src="https://img.shields.io/pypi/v/attrs" />
+ </a>
+ </p>
+
+.. teaser-begin
+
+``attrs`` is the Python package that will bring back the **joy** of **writing classes** by relieving you from the drudgery of implementing object protocols (aka `dunder methods <https://www.attrs.org/en/latest/glossary.html#term-dunder-methods>`_).
+`Trusted by NASA <https://docs.github.com/en/account-and-profile/setting-up-and-managing-your-github-profile/customizing-your-profile/personalizing-your-profile#list-of-qualifying-repositories-for-mars-2020-helicopter-contributor-badge>`_ for Mars missions since 2020!
+
+Its main goal is to help you to write **concise** and **correct** software without slowing down your code.
+
+.. teaser-end
+
+For that, it gives you a class decorator and a way to declaratively define the attributes on that class:
+
+.. -code-begin-
+
+.. code-block:: pycon
+
+ >>> from attrs import asdict, define, make_class, Factory
+
+ >>> @define
+ ... class SomeClass:
+ ... a_number: int = 42
+ ... list_of_numbers: list[int] = Factory(list)
+ ...
+ ... def hard_math(self, another_number):
+ ... return self.a_number + sum(self.list_of_numbers) * another_number
+
+
+ >>> sc = SomeClass(1, [1, 2, 3])
+ >>> sc
+ SomeClass(a_number=1, list_of_numbers=[1, 2, 3])
+
+ >>> sc.hard_math(3)
+ 19
+ >>> sc == SomeClass(1, [1, 2, 3])
+ True
+ >>> sc != SomeClass(2, [3, 2, 1])
+ True
+
+ >>> asdict(sc)
+ {'a_number': 1, 'list_of_numbers': [1, 2, 3]}
+
+ >>> SomeClass()
+ SomeClass(a_number=42, list_of_numbers=[])
+
+ >>> C = make_class("C", ["a", "b"])
+ >>> C("foo", "bar")
+ C(a='foo', b='bar')
+
+
+After *declaring* your attributes, ``attrs`` gives you:
+
+- a concise and explicit overview of the class's attributes,
+- a nice human-readable ``__repr__``,
+- equality-checking methods,
+- an initializer,
+- and much more,
+
+*without* writing dull boilerplate code again and again and *without* runtime performance penalties.
+
+**Hate type annotations**!?
+No problem!
+Types are entirely **optional** with ``attrs``.
+Simply assign ``attrs.field()`` to the attributes instead of annotating them with types.
+
+----
+
+This example uses ``attrs``'s modern APIs that have been introduced in version 20.1.0, and the ``attrs`` package import name that has been added in version 21.3.0.
+The classic APIs (``@attr.s``, ``attr.ib``, plus their serious business aliases) and the ``attr`` package import name will remain **indefinitely**.
+
+Please check out `On The Core API Names <https://www.attrs.org/en/latest/names.html>`_ for a more in-depth explanation.
+
+
+Data Classes
+============
+
+On the tin, ``attrs`` might remind you of ``dataclasses`` (and indeed, ``dataclasses`` are a descendant of ``attrs``).
+In practice it does a lot more and is more flexible.
+For instance it allows you to define `special handling of NumPy arrays for equality checks <https://www.attrs.org/en/stable/comparison.html#customization>`_, or allows more ways to `plug into the initialization process <https://www.attrs.org/en/stable/init.html#hooking-yourself-into-initialization>`_.
+
+For more details, please refer to our `comparison page <https://www.attrs.org/en/stable/why.html#data-classes>`_.
+
+
+.. -getting-help-
+
+Getting Help
+============
+
+Please use the ``python-attrs`` tag on `Stack Overflow <https://stackoverflow.com/questions/tagged/python-attrs>`_ to get help.
+
+Answering questions of your fellow developers is also a great way to help the project!
+
+
+.. -project-information-
+
+Project Information
+===================
+
+``attrs`` is released under the `MIT <https://choosealicense.com/licenses/mit/>`_ license,
+its documentation lives at `Read the Docs <https://www.attrs.org/>`_,
+the code on `GitHub <https://github.com/python-attrs/attrs>`_,
+and the latest release on `PyPI <https://pypi.org/project/attrs/>`_.
+It’s rigorously tested on Python 2.7, 3.5+, and PyPy.
+
+We collect information on **third-party extensions** in our `wiki <https://github.com/python-attrs/attrs/wiki/Extensions-to-attrs>`_.
+Feel free to browse and add your own!
+
+If you'd like to contribute to ``attrs`` you're most welcome and we've written `a little guide <https://github.com/python-attrs/attrs/blob/main/.github/CONTRIBUTING.md>`_ to get you started!
+
+
+``attrs`` for Enterprise
+------------------------
+
+Available as part of the Tidelift Subscription.
+
+The maintainers of ``attrs`` and thousands of other packages are working with Tidelift to deliver commercial support and maintenance for the open source packages you use to build your applications.
+Save time, reduce risk, and improve code health, while paying the maintainers of the exact packages you use.
+`Learn more. <https://tidelift.com/subscription/pkg/pypi-attrs?utm_source=pypi-attrs&utm_medium=referral&utm_campaign=enterprise&utm_term=repo>`_
diff --git a/testing/web-platform/tests/tools/third_party/attrs/changelog.d/.gitignore b/testing/web-platform/tests/tools/third_party/attrs/changelog.d/.gitignore
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/changelog.d/.gitignore
diff --git a/testing/web-platform/tests/tools/third_party/attrs/changelog.d/towncrier_template.rst b/testing/web-platform/tests/tools/third_party/attrs/changelog.d/towncrier_template.rst
new file mode 100644
index 0000000000..29ca74c4e8
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/changelog.d/towncrier_template.rst
@@ -0,0 +1,35 @@
+{% for section, _ in sections.items() %}
+{% set underline = underlines[0] %}{% if section %}{{section}}
+{{ underline * section|length }}{% set underline = underlines[1] %}
+
+{% endif %}
+
+{% if sections[section] %}
+{% for category, val in definitions.items() if category in sections[section]%}
+{{ definitions[category]['name'] }}
+{{ underline * definitions[category]['name']|length }}
+
+{% if definitions[category]['showcontent'] %}
+{% for text, values in sections[section][category].items() %}
+- {{ text }}
+ {{ values|join(',\n ') }}
+{% endfor %}
+
+{% else %}
+- {{ sections[section][category]['']|join(', ') }}
+
+{% endif %}
+{% if sections[section][category]|length == 0 %}
+No significant changes.
+
+{% else %}
+{% endif %}
+
+{% endfor %}
+{% else %}
+No significant changes.
+
+
+{% endif %}
+{% endfor %}
+----
diff --git a/testing/web-platform/tests/tools/third_party/attrs/conftest.py b/testing/web-platform/tests/tools/third_party/attrs/conftest.py
new file mode 100644
index 0000000000..0d539a115c
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/conftest.py
@@ -0,0 +1,29 @@
+# SPDX-License-Identifier: MIT
+
+from __future__ import absolute_import, division, print_function
+
+from hypothesis import HealthCheck, settings
+
+from attr._compat import PY36, PY310
+
+
+def pytest_configure(config):
+ # HealthCheck.too_slow causes more trouble than good -- especially in CIs.
+ settings.register_profile(
+ "patience", settings(suppress_health_check=[HealthCheck.too_slow])
+ )
+ settings.load_profile("patience")
+
+
+collect_ignore = []
+if not PY36:
+ collect_ignore.extend(
+ [
+ "tests/test_annotations.py",
+ "tests/test_hooks.py",
+ "tests/test_init_subclass.py",
+ "tests/test_next_gen.py",
+ ]
+ )
+if not PY310:
+ collect_ignore.extend(["tests/test_pattern_matching.py"])
diff --git a/testing/web-platform/tests/tools/third_party/attrs/docs/Makefile b/testing/web-platform/tests/tools/third_party/attrs/docs/Makefile
new file mode 100644
index 0000000000..3143891daf
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/docs/Makefile
@@ -0,0 +1,177 @@
+# Makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line.
+SPHINXOPTS =
+SPHINXBUILD = sphinx-build
+PAPER =
+BUILDDIR = _build
+
+# User-friendly check for sphinx-build
+ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
+$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
+endif
+
+# Internal variables.
+PAPEROPT_a4 = -D latex_paper_size=a4
+PAPEROPT_letter = -D latex_paper_size=letter
+ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
+# the i18n builder cannot share the environment and doctrees with the others
+I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
+
+.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext
+
+help:
+ @echo "Please use \`make <target>' where <target> is one of"
+ @echo " html to make standalone HTML files"
+ @echo " dirhtml to make HTML files named index.html in directories"
+ @echo " singlehtml to make a single large HTML file"
+ @echo " pickle to make pickle files"
+ @echo " json to make JSON files"
+ @echo " htmlhelp to make HTML files and a HTML help project"
+ @echo " qthelp to make HTML files and a qthelp project"
+ @echo " devhelp to make HTML files and a Devhelp project"
+ @echo " epub to make an epub"
+ @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
+ @echo " latexpdf to make LaTeX files and run them through pdflatex"
+ @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
+ @echo " text to make text files"
+ @echo " man to make manual pages"
+ @echo " texinfo to make Texinfo files"
+ @echo " info to make Texinfo files and run them through makeinfo"
+ @echo " gettext to make PO message catalogs"
+ @echo " changes to make an overview of all changed/added/deprecated items"
+ @echo " xml to make Docutils-native XML files"
+ @echo " pseudoxml to make pseudoxml-XML files for display purposes"
+ @echo " linkcheck to check all external links for integrity"
+ @echo " doctest to run all doctests embedded in the documentation (if enabled)"
+
+clean:
+ rm -rf $(BUILDDIR)/*
+
+html:
+ $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
+ @echo
+ @echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
+
+dirhtml:
+ $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
+ @echo
+ @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
+
+singlehtml:
+ $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
+ @echo
+ @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
+
+pickle:
+ $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
+ @echo
+ @echo "Build finished; now you can process the pickle files."
+
+json:
+ $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
+ @echo
+ @echo "Build finished; now you can process the JSON files."
+
+htmlhelp:
+ $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
+ @echo
+ @echo "Build finished; now you can run HTML Help Workshop with the" \
+ ".hhp project file in $(BUILDDIR)/htmlhelp."
+
+qthelp:
+ $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
+ @echo
+ @echo "Build finished; now you can run "qcollectiongenerator" with the" \
+ ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
+ @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/attrs.qhcp"
+ @echo "To view the help file:"
+ @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/attrs.qhc"
+
+devhelp:
+ $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
+ @echo
+ @echo "Build finished."
+ @echo "To view the help file:"
+ @echo "# mkdir -p $$HOME/.local/share/devhelp/attrs"
+ @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/attrs"
+ @echo "# devhelp"
+
+epub:
+ $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
+ @echo
+ @echo "Build finished. The epub file is in $(BUILDDIR)/epub."
+
+latex:
+ $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+ @echo
+ @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
+ @echo "Run \`make' in that directory to run these through (pdf)latex" \
+ "(use \`make latexpdf' here to do that automatically)."
+
+latexpdf:
+ $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+ @echo "Running LaTeX files through pdflatex..."
+ $(MAKE) -C $(BUILDDIR)/latex all-pdf
+ @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
+
+latexpdfja:
+ $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+ @echo "Running LaTeX files through platex and dvipdfmx..."
+ $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
+ @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
+
+text:
+ $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
+ @echo
+ @echo "Build finished. The text files are in $(BUILDDIR)/text."
+
+man:
+ $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
+ @echo
+ @echo "Build finished. The manual pages are in $(BUILDDIR)/man."
+
+texinfo:
+ $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
+ @echo
+ @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
+ @echo "Run \`make' in that directory to run these through makeinfo" \
+ "(use \`make info' here to do that automatically)."
+
+info:
+ $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
+ @echo "Running Texinfo files through makeinfo..."
+ make -C $(BUILDDIR)/texinfo info
+ @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
+
+gettext:
+ $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
+ @echo
+ @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
+
+changes:
+ $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
+ @echo
+ @echo "The overview file is in $(BUILDDIR)/changes."
+
+linkcheck:
+ $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
+ @echo
+ @echo "Link check complete; look for any errors in the above output " \
+ "or in $(BUILDDIR)/linkcheck/output.txt."
+
+doctest:
+ $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
+ @echo "Testing of doctests in the sources finished, look at the " \
+ "results in $(BUILDDIR)/doctest/output.txt."
+
+xml:
+ $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
+ @echo
+ @echo "Build finished. The XML files are in $(BUILDDIR)/xml."
+
+pseudoxml:
+ $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
+ @echo
+ @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."
diff --git a/testing/web-platform/tests/tools/third_party/attrs/docs/_static/attrs_logo.png b/testing/web-platform/tests/tools/third_party/attrs/docs/_static/attrs_logo.png
new file mode 100644
index 0000000000..11b6e6fe3f
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/docs/_static/attrs_logo.png
Binary files differ
diff --git a/testing/web-platform/tests/tools/third_party/attrs/docs/_static/attrs_logo.svg b/testing/web-platform/tests/tools/third_party/attrs/docs/_static/attrs_logo.svg
new file mode 100644
index 0000000000..b02ae6c025
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/docs/_static/attrs_logo.svg
@@ -0,0 +1,10 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 142 118" version="1.1" fill="#222">
+ <path d="M 88.984375 4.2460938 L 88.269531 6.40625 A 13.133 13.133 0 0 0 85.244141 6.9023438 L 83.9375 5.0625 L 83.699219 5.1523438 A 14.916 14.916 0 0 0 80.007812 7.0859375 L 79.8125 7.2265625 L 80.507812 9.40625 A 12.958 12.958 0 0 0 78.490234 11.496094 L 76.351562 10.785156 L 76.214844 10.996094 A 15.179 15.179 0 0 0 74.369141 14.8125 L 74.304688 15.035156 L 76.15625 16.398438 C 76.14425 16.418437 76.140719 16.441031 76.136719 16.457031 A 12.874 12.874 0 0 0 75.796875 19.035156 L 73.640625 19.71875 L 73.652344 19.964844 C 73.706344 21.432844 73.959109 22.867563 74.412109 24.226562 L 74.488281 24.453125 L 76.796875 24.464844 C 77.109875 25.214844 77.488594 25.930281 77.933594 26.613281 L 76.585938 28.441406 L 76.734375 28.636719 A 15.028 15.028 0 0 0 79.863281 31.710938 L 80.054688 31.851562 L 81.921875 30.515625 C 82.562875 30.917625 83.24975 31.265687 83.96875 31.554688 L 83.951172 33.835938 L 84.183594 33.910156 C 84.570594 34.031156 84.960281 34.144188 85.363281 34.242188 C 86.425281 34.488188 87.48425 34.621531 88.53125 34.644531 L 88.773438 34.648438 L 89.490234 32.484375 A 12.819 12.819 0 0 0 91.787109 32.167969 L 93.123047 34.03125 L 93.355469 33.957031 A 15.097 15.097 0 0 0 97.300781 32.070312 L 97.503906 31.933594 L 96.824219 29.773438 A 13.195 13.195 0 0 0 98.628906 28.085938 L 100.8125 28.8125 L 100.95508 28.621094 A 14.78 14.78 0 0 0 103.04688 24.859375 L 103.13672 24.621094 L 101.32031 23.285156 C 101.40631 23.008156 101.48078 22.726313 101.55078 22.445312 C 101.69178 21.832313 101.78875 21.226 101.84375 20.625 L 104.0332 19.929688 L 104.0332 19.691406 C 104.0332 19.605406 104.04297 19.518687 104.04297 19.429688 A 15.365 15.365 0 0 0 103.51953 15.5 L 103.45117 15.257812 L 101.19922 15.246094 A 13.253 13.253 0 0 0 99.941406 12.582031 L 101.29297 10.738281 L 101.15625 10.546875 A 15.367 15.367 0 0 0 98.287109 7.5429688 L 98.09375 7.3867188 L 96.253906 8.703125 A 13.082 13.082 0 0 0 93.53125 7.265625 L 93.542969 5 L 93.3125 4.9257812 A 18.186 18.186 0 0 0 92.320312 4.6523438 A 15.815 15.815 0 0 0 89.234375 4.25 L 88.984375 4.2460938 z M 88.759766 15.541016 A 3.914 3.914 0 0 1 89.740234 15.644531 A 3.913 3.913 0 0 1 92.753906 19.441406 C 92.753906 19.742406 92.722344 20.04275 92.652344 20.34375 A 3.92 3.92 0 0 1 88.847656 23.359375 A 3.72 3.72 0 0 1 87.949219 23.25 C 86.144219 22.836 84.9375 21.226125 84.9375 19.453125 C 84.9375 19.156125 84.967203 18.858688 85.033203 18.554688 A 3.914 3.914 0 0 1 88.759766 15.541016 z "/>
+ <path d="M 60.488281 22.824219 C 58.968281 22.824219 57.488594 22.98425 56.058594 23.28125 L 55.78125 23.332031 L 55.488281 26.582031 C 54.023281 26.992031 52.624219 27.5785 51.324219 28.3125 L 48.886719 26.179688 L 48.648438 26.335938 A 21.852 21.852 0 0 0 44.152344 30.230469 L 43.972656 30.4375 L 45.65625 33.257812 A 18.478 18.478 0 0 0 43.46875 36.933594 L 40.248047 36.644531 L 40.15625 36.910156 A 21.157 21.157 0 0 0 38.84375 42.828125 L 38.820312 43.09375 L 41.855469 44.390625 C 41.851469 44.437625 41.851562 44.488063 41.851562 44.539062 C 41.851562 45.828063 41.988281 47.093687 42.238281 48.304688 L 39.455078 49.960938 L 39.527344 50.234375 A 21.58 21.58 0 0 0 41.980469 55.90625 L 42.119141 56.132812 L 45.34375 55.402344 A 18.763 18.763 0 0 0 47.714844 58.105469 L 46.4375 61.097656 L 46.648438 61.277344 A 21.703 21.703 0 0 0 52.007812 64.535156 L 52.248047 64.640625 L 54.425781 62.160156 C 55.484781 62.527156 56.601281 62.80075 57.738281 62.96875 L 58.464844 66.15625 L 58.744141 66.175781 C 59.307141 66.222781 59.894281 66.253906 60.488281 66.253906 C 62.042281 66.253906 63.558437 66.086437 65.023438 65.773438 L 65.296875 65.714844 L 65.589844 62.460938 A 19.053 19.053 0 0 0 68.792969 61.21875 L 71.269531 63.382812 L 71.503906 63.246094 A 21.892 21.892 0 0 0 76.378906 59.328125 L 76.574219 59.125 L 74.908203 56.335938 A 18.426 18.426 0 0 0 76.9375 53.289062 L 80.230469 53.585938 L 80.335938 53.335938 A 21.627 21.627 0 0 0 82.007812 47.414062 L 82.042969 47.128906 L 79.066406 45.859375 C 79.097406 45.430375 79.119141 44.988062 79.119141 44.539062 C 79.119141 43.625062 79.054781 42.734375 78.925781 41.859375 L 81.757812 40.171875 L 81.699219 39.90625 A 21.733 21.733 0 0 0 79.613281 34.246094 L 79.476562 33.992188 L 76.320312 34.714844 A 18.63 18.63 0 0 0 73.617188 31.320312 L 74.902344 28.308594 L 74.701172 28.132812 A 22.087 22.087 0 0 0 69.726562 24.886719 L 69.472656 24.769531 L 67.335938 27.210938 A 18.403 18.403 0 0 0 62.949219 26.074219 L 62.222656 22.898438 L 61.945312 22.882812 A 19.927 19.927 0 0 0 60.488281 22.824219 z M 60.488281 38.824219 C 63.644281 38.836219 66.199219 41.387062 66.199219 44.539062 A 5.715 5.715 0 0 1 60.488281 50.253906 A 5.717 5.717 0 0 1 54.773438 44.539062 A 5.725 5.725 0 0 1 60.488281 38.824219 z "/>
+ <path d="m 134.226,94.281 c 0,0 0.445,2.621 -0.574,7.356 -1.024,4.796 -2.559,7.351 -2.559,7.351 a 31.76,31.76 0 0 1 -10.809,1.922 c -3.773,0 -7.16,-0.707 -9.976,-1.922 0,0 -0.383,-1.726 0.129,-4.988 1.406,0.387 6.457,1.793 10.933,1.793 2.497,0 5.375,-0.703 5.375,-0.703 0,0 0.704,-1.153 1.149,-3.453 0.512,-2.305 0.32,-3.454 0.32,-3.454 0,0 -2.558,-0.703 -5.051,-0.703 -3.902,0 -7.226,-0.64 -10.039,-1.855 0,0 -0.386,-2.879 0.383,-6.524 0.766,-3.707 2.43,-6.585 2.43,-6.585 3.324,-1.153 7.035,-1.856 10.808,-1.856 3.77,0 7.161,0.703 9.973,1.856 0,0.128 0.387,2.046 -0.062,5.179 -1.536,-0.449 -7.165,-1.918 -11,-1.918 -2.493,0 -5.372,0.703 -5.372,0.703 0,0 -0.64,1.024 -0.957,2.621 -0.32,1.598 -0.195,2.622 -0.195,2.622 0,0 2.496,0.64 5.117,0.703 3.774,0 7.164,0.64 9.977,1.855 z"/>
+ <path d="m 105.511,80.66 c 1.984,0 3.84,0.191 5.629,0.578 -0.703,1.727 -1.469,3.324 -2.367,4.86 -1.406,-0.192 -2.813,-0.321 -4.348,-0.321 -2.492,0 -5.242,0.703 -5.242,0.703 0,0 -1.856,6.075 -2.496,9.274 L 93.62,110.269 H 87.8 l 3.07,-14.515 a 84.252,84.252 0 0 1 3.836,-13.238 c 3.325,-1.153 7.035,-1.856 10.805,-1.856 z"/>
+ <path d="m 77.374,105.793 c 2.817,0 5.629,-0.512 7.867,-1.024 -0.765,1.981 -1.664,3.774 -2.621,5.5 -2.046,0.383 -4.156,0.641 -6.332,0.641 -3.773,0 -7.097,-0.707 -9.91,-1.922 0.125,-3.965 0.766,-8.441 1.789,-13.234 l 1.918,-9.082 h -6.457 c 0.703,-1.789 1.469,-3.453 2.492,-5.051 h 5.055 l 1.34,-6.332 a 31.365,31.365 0 0 1 6.074,-1.535 l -1.66,7.867 h 11.125 c -0.703,1.789 -1.535,3.516 -2.492,5.051 h -9.719 l -1.922,9.082 c -0.703,3.262 -1.469,9.336 -1.469,9.336 0,0 2.493,0.703 4.922,0.703 z"/>
+ <path d="m 49.878,105.793 c 2.813,0 5.629,-0.512 7.867,-1.024 -0.769,1.981 -1.664,3.774 -2.621,5.5 -2.047,0.383 -4.16,0.641 -6.332,0.641 -3.773,0 -7.097,-0.707 -9.914,-1.922 0.129,-3.965 0.77,-8.441 1.793,-13.234 l 1.918,-9.082 h -6.461 c 0.707,-1.789 1.473,-3.453 2.496,-5.051 h 5.051 l 1.344,-6.332 a 31.365,31.365 0 0 1 6.074,-1.535 l -1.66,7.867 h 11.125 c -0.703,1.789 -1.535,3.516 -2.492,5.051 h -9.723 l -1.918,9.082 c -0.703,3.262 -1.469,9.336 -1.469,9.336 0,0 2.493,0.703 4.922,0.703 z"/>
+ <path d="M 22.574219 80.660156 C 18.800219 80.660156 15.093625 81.362625 11.765625 82.515625 C 11.128625 84.112625 10.616109 85.715406 10.037109 87.441406 C 12.022109 86.863406 16.624281 85.777344 21.488281 85.777344 C 23.980281 85.777344 26.476562 86.480469 26.476562 86.480469 C 26.476562 86.480469 26.089531 89.101062 25.644531 91.789062 C 23.980531 91.469062 22.191938 91.277344 20.335938 91.277344 A 32.101 32.101 0 0 0 9.4648438 93.195312 C 9.4648437 93.195312 7.8003437 96.328 6.7773438 101.125 C 5.7533437 105.855 6.140625 108.98828 6.140625 108.98828 C 8.952625 110.20328 12.343281 110.91016 16.113281 110.91016 A 31.74 31.74 0 0 0 26.921875 108.98828 C 28.456875 105.02328 29.734813 100.54691 30.757812 95.753906 C 31.718813 91.018906 32.359781 86.542625 32.550781 82.515625 C 29.734781 81.362625 26.347219 80.660156 22.574219 80.660156 z M 19.248047 96.390625 A 21.116 21.116 0 0 1 24.619141 97.09375 C 23.850141 100.42175 22.511719 105.08984 22.511719 105.08984 C 22.511719 105.08984 19.951172 105.79297 17.201172 105.79297 C 14.705172 105.79297 12.152344 105.08984 12.152344 105.08984 C 12.152344 105.08984 12.085656 103.426 12.597656 101.125 C 13.109656 98.758 13.8125 97.09375 13.8125 97.09375 C 13.8125 97.09375 16.752047 96.390625 19.248047 96.390625 z "/>
+</svg>
diff --git a/testing/web-platform/tests/tools/third_party/attrs/docs/_static/attrs_logo_white.svg b/testing/web-platform/tests/tools/third_party/attrs/docs/_static/attrs_logo_white.svg
new file mode 100644
index 0000000000..daad798da0
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/docs/_static/attrs_logo_white.svg
@@ -0,0 +1,10 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 142 118" version="1.1" fill="#fff">
+ <path d="M 88.984375 4.2460938 L 88.269531 6.40625 A 13.133 13.133 0 0 0 85.244141 6.9023438 L 83.9375 5.0625 L 83.699219 5.1523438 A 14.916 14.916 0 0 0 80.007812 7.0859375 L 79.8125 7.2265625 L 80.507812 9.40625 A 12.958 12.958 0 0 0 78.490234 11.496094 L 76.351562 10.785156 L 76.214844 10.996094 A 15.179 15.179 0 0 0 74.369141 14.8125 L 74.304688 15.035156 L 76.15625 16.398438 C 76.14425 16.418437 76.140719 16.441031 76.136719 16.457031 A 12.874 12.874 0 0 0 75.796875 19.035156 L 73.640625 19.71875 L 73.652344 19.964844 C 73.706344 21.432844 73.959109 22.867563 74.412109 24.226562 L 74.488281 24.453125 L 76.796875 24.464844 C 77.109875 25.214844 77.488594 25.930281 77.933594 26.613281 L 76.585938 28.441406 L 76.734375 28.636719 A 15.028 15.028 0 0 0 79.863281 31.710938 L 80.054688 31.851562 L 81.921875 30.515625 C 82.562875 30.917625 83.24975 31.265687 83.96875 31.554688 L 83.951172 33.835938 L 84.183594 33.910156 C 84.570594 34.031156 84.960281 34.144188 85.363281 34.242188 C 86.425281 34.488188 87.48425 34.621531 88.53125 34.644531 L 88.773438 34.648438 L 89.490234 32.484375 A 12.819 12.819 0 0 0 91.787109 32.167969 L 93.123047 34.03125 L 93.355469 33.957031 A 15.097 15.097 0 0 0 97.300781 32.070312 L 97.503906 31.933594 L 96.824219 29.773438 A 13.195 13.195 0 0 0 98.628906 28.085938 L 100.8125 28.8125 L 100.95508 28.621094 A 14.78 14.78 0 0 0 103.04688 24.859375 L 103.13672 24.621094 L 101.32031 23.285156 C 101.40631 23.008156 101.48078 22.726313 101.55078 22.445312 C 101.69178 21.832313 101.78875 21.226 101.84375 20.625 L 104.0332 19.929688 L 104.0332 19.691406 C 104.0332 19.605406 104.04297 19.518687 104.04297 19.429688 A 15.365 15.365 0 0 0 103.51953 15.5 L 103.45117 15.257812 L 101.19922 15.246094 A 13.253 13.253 0 0 0 99.941406 12.582031 L 101.29297 10.738281 L 101.15625 10.546875 A 15.367 15.367 0 0 0 98.287109 7.5429688 L 98.09375 7.3867188 L 96.253906 8.703125 A 13.082 13.082 0 0 0 93.53125 7.265625 L 93.542969 5 L 93.3125 4.9257812 A 18.186 18.186 0 0 0 92.320312 4.6523438 A 15.815 15.815 0 0 0 89.234375 4.25 L 88.984375 4.2460938 z M 88.759766 15.541016 A 3.914 3.914 0 0 1 89.740234 15.644531 A 3.913 3.913 0 0 1 92.753906 19.441406 C 92.753906 19.742406 92.722344 20.04275 92.652344 20.34375 A 3.92 3.92 0 0 1 88.847656 23.359375 A 3.72 3.72 0 0 1 87.949219 23.25 C 86.144219 22.836 84.9375 21.226125 84.9375 19.453125 C 84.9375 19.156125 84.967203 18.858688 85.033203 18.554688 A 3.914 3.914 0 0 1 88.759766 15.541016 z "/>
+ <path d="M 60.488281 22.824219 C 58.968281 22.824219 57.488594 22.98425 56.058594 23.28125 L 55.78125 23.332031 L 55.488281 26.582031 C 54.023281 26.992031 52.624219 27.5785 51.324219 28.3125 L 48.886719 26.179688 L 48.648438 26.335938 A 21.852 21.852 0 0 0 44.152344 30.230469 L 43.972656 30.4375 L 45.65625 33.257812 A 18.478 18.478 0 0 0 43.46875 36.933594 L 40.248047 36.644531 L 40.15625 36.910156 A 21.157 21.157 0 0 0 38.84375 42.828125 L 38.820312 43.09375 L 41.855469 44.390625 C 41.851469 44.437625 41.851562 44.488063 41.851562 44.539062 C 41.851562 45.828063 41.988281 47.093687 42.238281 48.304688 L 39.455078 49.960938 L 39.527344 50.234375 A 21.58 21.58 0 0 0 41.980469 55.90625 L 42.119141 56.132812 L 45.34375 55.402344 A 18.763 18.763 0 0 0 47.714844 58.105469 L 46.4375 61.097656 L 46.648438 61.277344 A 21.703 21.703 0 0 0 52.007812 64.535156 L 52.248047 64.640625 L 54.425781 62.160156 C 55.484781 62.527156 56.601281 62.80075 57.738281 62.96875 L 58.464844 66.15625 L 58.744141 66.175781 C 59.307141 66.222781 59.894281 66.253906 60.488281 66.253906 C 62.042281 66.253906 63.558437 66.086437 65.023438 65.773438 L 65.296875 65.714844 L 65.589844 62.460938 A 19.053 19.053 0 0 0 68.792969 61.21875 L 71.269531 63.382812 L 71.503906 63.246094 A 21.892 21.892 0 0 0 76.378906 59.328125 L 76.574219 59.125 L 74.908203 56.335938 A 18.426 18.426 0 0 0 76.9375 53.289062 L 80.230469 53.585938 L 80.335938 53.335938 A 21.627 21.627 0 0 0 82.007812 47.414062 L 82.042969 47.128906 L 79.066406 45.859375 C 79.097406 45.430375 79.119141 44.988062 79.119141 44.539062 C 79.119141 43.625062 79.054781 42.734375 78.925781 41.859375 L 81.757812 40.171875 L 81.699219 39.90625 A 21.733 21.733 0 0 0 79.613281 34.246094 L 79.476562 33.992188 L 76.320312 34.714844 A 18.63 18.63 0 0 0 73.617188 31.320312 L 74.902344 28.308594 L 74.701172 28.132812 A 22.087 22.087 0 0 0 69.726562 24.886719 L 69.472656 24.769531 L 67.335938 27.210938 A 18.403 18.403 0 0 0 62.949219 26.074219 L 62.222656 22.898438 L 61.945312 22.882812 A 19.927 19.927 0 0 0 60.488281 22.824219 z M 60.488281 38.824219 C 63.644281 38.836219 66.199219 41.387062 66.199219 44.539062 A 5.715 5.715 0 0 1 60.488281 50.253906 A 5.717 5.717 0 0 1 54.773438 44.539062 A 5.725 5.725 0 0 1 60.488281 38.824219 z "/>
+ <path d="m 134.226,94.281 c 0,0 0.445,2.621 -0.574,7.356 -1.024,4.796 -2.559,7.351 -2.559,7.351 a 31.76,31.76 0 0 1 -10.809,1.922 c -3.773,0 -7.16,-0.707 -9.976,-1.922 0,0 -0.383,-1.726 0.129,-4.988 1.406,0.387 6.457,1.793 10.933,1.793 2.497,0 5.375,-0.703 5.375,-0.703 0,0 0.704,-1.153 1.149,-3.453 0.512,-2.305 0.32,-3.454 0.32,-3.454 0,0 -2.558,-0.703 -5.051,-0.703 -3.902,0 -7.226,-0.64 -10.039,-1.855 0,0 -0.386,-2.879 0.383,-6.524 0.766,-3.707 2.43,-6.585 2.43,-6.585 3.324,-1.153 7.035,-1.856 10.808,-1.856 3.77,0 7.161,0.703 9.973,1.856 0,0.128 0.387,2.046 -0.062,5.179 -1.536,-0.449 -7.165,-1.918 -11,-1.918 -2.493,0 -5.372,0.703 -5.372,0.703 0,0 -0.64,1.024 -0.957,2.621 -0.32,1.598 -0.195,2.622 -0.195,2.622 0,0 2.496,0.64 5.117,0.703 3.774,0 7.164,0.64 9.977,1.855 z"/>
+ <path d="m 105.511,80.66 c 1.984,0 3.84,0.191 5.629,0.578 -0.703,1.727 -1.469,3.324 -2.367,4.86 -1.406,-0.192 -2.813,-0.321 -4.348,-0.321 -2.492,0 -5.242,0.703 -5.242,0.703 0,0 -1.856,6.075 -2.496,9.274 L 93.62,110.269 H 87.8 l 3.07,-14.515 a 84.252,84.252 0 0 1 3.836,-13.238 c 3.325,-1.153 7.035,-1.856 10.805,-1.856 z"/>
+ <path d="m 77.374,105.793 c 2.817,0 5.629,-0.512 7.867,-1.024 -0.765,1.981 -1.664,3.774 -2.621,5.5 -2.046,0.383 -4.156,0.641 -6.332,0.641 -3.773,0 -7.097,-0.707 -9.91,-1.922 0.125,-3.965 0.766,-8.441 1.789,-13.234 l 1.918,-9.082 h -6.457 c 0.703,-1.789 1.469,-3.453 2.492,-5.051 h 5.055 l 1.34,-6.332 a 31.365,31.365 0 0 1 6.074,-1.535 l -1.66,7.867 h 11.125 c -0.703,1.789 -1.535,3.516 -2.492,5.051 h -9.719 l -1.922,9.082 c -0.703,3.262 -1.469,9.336 -1.469,9.336 0,0 2.493,0.703 4.922,0.703 z"/>
+ <path d="m 49.878,105.793 c 2.813,0 5.629,-0.512 7.867,-1.024 -0.769,1.981 -1.664,3.774 -2.621,5.5 -2.047,0.383 -4.16,0.641 -6.332,0.641 -3.773,0 -7.097,-0.707 -9.914,-1.922 0.129,-3.965 0.77,-8.441 1.793,-13.234 l 1.918,-9.082 h -6.461 c 0.707,-1.789 1.473,-3.453 2.496,-5.051 h 5.051 l 1.344,-6.332 a 31.365,31.365 0 0 1 6.074,-1.535 l -1.66,7.867 h 11.125 c -0.703,1.789 -1.535,3.516 -2.492,5.051 h -9.723 l -1.918,9.082 c -0.703,3.262 -1.469,9.336 -1.469,9.336 0,0 2.493,0.703 4.922,0.703 z"/>
+ <path d="M 22.574219 80.660156 C 18.800219 80.660156 15.093625 81.362625 11.765625 82.515625 C 11.128625 84.112625 10.616109 85.715406 10.037109 87.441406 C 12.022109 86.863406 16.624281 85.777344 21.488281 85.777344 C 23.980281 85.777344 26.476562 86.480469 26.476562 86.480469 C 26.476562 86.480469 26.089531 89.101062 25.644531 91.789062 C 23.980531 91.469062 22.191938 91.277344 20.335938 91.277344 A 32.101 32.101 0 0 0 9.4648438 93.195312 C 9.4648437 93.195312 7.8003437 96.328 6.7773438 101.125 C 5.7533437 105.855 6.140625 108.98828 6.140625 108.98828 C 8.952625 110.20328 12.343281 110.91016 16.113281 110.91016 A 31.74 31.74 0 0 0 26.921875 108.98828 C 28.456875 105.02328 29.734813 100.54691 30.757812 95.753906 C 31.718813 91.018906 32.359781 86.542625 32.550781 82.515625 C 29.734781 81.362625 26.347219 80.660156 22.574219 80.660156 z M 19.248047 96.390625 A 21.116 21.116 0 0 1 24.619141 97.09375 C 23.850141 100.42175 22.511719 105.08984 22.511719 105.08984 C 22.511719 105.08984 19.951172 105.79297 17.201172 105.79297 C 14.705172 105.79297 12.152344 105.08984 12.152344 105.08984 C 12.152344 105.08984 12.085656 103.426 12.597656 101.125 C 13.109656 98.758 13.8125 97.09375 13.8125 97.09375 C 13.8125 97.09375 16.752047 96.390625 19.248047 96.390625 z "/>
+</svg>
diff --git a/testing/web-platform/tests/tools/third_party/attrs/docs/api.rst b/testing/web-platform/tests/tools/third_party/attrs/docs/api.rst
new file mode 100644
index 0000000000..02aed52ad5
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/docs/api.rst
@@ -0,0 +1,826 @@
+API Reference
+=============
+
+.. currentmodule:: attr
+
+``attrs`` works by decorating a class using `attrs.define` or `attr.s` and then optionally defining attributes on the class using `attrs.field`, `attr.ib`, or a type annotation.
+
+If you're confused by the many names, please check out `names` for clarification.
+
+What follows is the API explanation, if you'd like a more hands-on introduction, have a look at `examples`.
+
+As of version 21.3.0, ``attrs`` consists of **two** top-level package names:
+
+- The classic ``attr`` that powered the venerable `attr.s` and `attr.ib`
+- The modern ``attrs`` that contains only the most modern APIs and relies on `attrs.define` and `attrs.field` to define your classes.
+  Additionally, it offers some ``attr`` APIs with nicer defaults (e.g. `attrs.asdict`).
+ Using this namespace requires Python 3.6 or later.
+
+The ``attrs`` namespace is built *on top of* ``attr`` which will *never* go away.
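+
+For example, the same class can be expressed through either namespace (a minimal sketch; the class names are illustrative)::
+
+   import attr
+   import attrs
+
+   @attrs.define              # modern API
+   class Point:
+       x: int
+       y: int
+
+   @attr.s                    # classic API
+   class OldPoint:
+       x = attr.ib()
+       y = attr.ib()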
+
+
+Core
+----
+
+.. note::
+
+ Please note that the ``attrs`` namespace has been added in version 21.3.0.
+ Most of the objects are simply re-imported from ``attr``.
+ Therefore if a class, method, or function claims that it has been added in an older version, it is only available in the ``attr`` namespace.
+
+.. autodata:: attrs.NOTHING
+
+.. autofunction:: attrs.define
+
+.. function:: attrs.mutable(same_as_define)
+
+ Alias for `attrs.define`.
+
+ .. versionadded:: 20.1.0
+
+.. function:: attrs.frozen(same_as_define)
+
+ Behaves the same as `attrs.define` but sets *frozen=True* and *on_setattr=None*.
+
+ .. versionadded:: 20.1.0
+
+.. autofunction:: attrs.field
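+
+   For example, a minimal sketch of the modern APIs (the class name is illustrative)::
+
+      @attrs.define
+      class Point:
+          x: int = attrs.field()
+          y: int = attrs.field(default=0)
+
+      Point(1)
+      # Point(x=1, y=0)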
+
+.. function:: define
+
+ Old import path for `attrs.define`.
+
+.. function:: mutable
+
+ Old import path for `attrs.mutable`.
+
+.. function:: frozen
+
+ Old import path for `attrs.frozen`.
+
+.. function:: field
+
+ Old import path for `attrs.field`.
+
+.. autoclass:: attrs.Attribute
+ :members: evolve
+
+ For example:
+
+ .. doctest::
+
+ >>> import attr
+ >>> @attr.s
+ ... class C(object):
+ ... x = attr.ib()
+ >>> attr.fields(C).x
+ Attribute(name='x', default=NOTHING, validator=None, repr=True, eq=True, eq_key=None, order=True, order_key=None, hash=None, init=True, metadata=mappingproxy({}), type=None, converter=None, kw_only=False, inherited=False, on_setattr=None)
+
+
+.. autofunction:: attrs.make_class
+
+ This is handy if you want to programmatically create classes.
+
+ For example:
+
+ .. doctest::
+
+ >>> C1 = attr.make_class("C1", ["x", "y"])
+ >>> C1(1, 2)
+ C1(x=1, y=2)
+ >>> C2 = attr.make_class("C2", {"x": attr.ib(default=42),
+ ... "y": attr.ib(default=attr.Factory(list))})
+ >>> C2()
+ C2(x=42, y=[])
+
+
+.. autoclass:: attrs.Factory
+
+ For example:
+
+ .. doctest::
+
+ >>> @attr.s
+ ... class C(object):
+ ... x = attr.ib(default=attr.Factory(list))
+ ... y = attr.ib(default=attr.Factory(
+ ... lambda self: set(self.x),
+ ... takes_self=True)
+ ... )
+ >>> C()
+ C(x=[], y=set())
+ >>> C([1, 2, 3])
+ C(x=[1, 2, 3], y={1, 2, 3})
+
+
+Classic
+~~~~~~~
+
+.. data:: attr.NOTHING
+
+ Same as `attrs.NOTHING`.
+
+.. autofunction:: attr.s(these=None, repr_ns=None, repr=None, cmp=None, hash=None, init=None, slots=False, frozen=False, weakref_slot=True, str=False, auto_attribs=False, kw_only=False, cache_hash=False, auto_exc=False, eq=None, order=None, auto_detect=False, collect_by_mro=False, getstate_setstate=None, on_setattr=None, field_transformer=None, match_args=True)
+
+ .. note::
+
+ ``attrs`` also comes with a serious business alias ``attr.attrs``.
+
+ For example:
+
+ .. doctest::
+
+ >>> import attr
+ >>> @attr.s
+ ... class C(object):
+ ... _private = attr.ib()
+ >>> C(private=42)
+ C(_private=42)
+ >>> class D(object):
+ ... def __init__(self, x):
+ ... self.x = x
+ >>> D(1)
+ <D object at ...>
+ >>> D = attr.s(these={"x": attr.ib()}, init=False)(D)
+ >>> D(1)
+ D(x=1)
+ >>> @attr.s(auto_exc=True)
+ ... class Error(Exception):
+ ... x = attr.ib()
+ ... y = attr.ib(default=42, init=False)
+ >>> Error("foo")
+ Error(x='foo', y=42)
+ >>> raise Error("foo")
+ Traceback (most recent call last):
+ ...
+ Error: ('foo', 42)
+ >>> raise ValueError("foo", 42) # for comparison
+ Traceback (most recent call last):
+ ...
+ ValueError: ('foo', 42)
+
+
+.. autofunction:: attr.ib
+
+ .. note::
+
+ ``attrs`` also comes with a serious business alias ``attr.attrib``.
+
+ The object returned by `attr.ib` also allows for setting the default and the validator using decorators:
+
+ .. doctest::
+
+ >>> @attr.s
+ ... class C(object):
+ ... x = attr.ib()
+ ... y = attr.ib()
+ ... @x.validator
+ ... def _any_name_except_a_name_of_an_attribute(self, attribute, value):
+ ... if value < 0:
+ ... raise ValueError("x must be positive")
+ ... @y.default
+ ... def _any_name_except_a_name_of_an_attribute(self):
+ ... return self.x + 1
+ >>> C(1)
+ C(x=1, y=2)
+ >>> C(-1)
+ Traceback (most recent call last):
+ ...
+ ValueError: x must be positive
+
+
+
+Exceptions
+----------
+
+All exceptions are available from both ``attr.exceptions`` and ``attrs.exceptions`` and are the same thing.
+That means that it doesn't matter from which namespace they've been raised and/or caught:
+
+.. doctest::
+
+ >>> import attrs, attr
+ >>> try:
+ ... raise attrs.exceptions.FrozenError()
+ ... except attr.exceptions.FrozenError:
+ ... print("this works!")
+ this works!
+
+.. autoexception:: attrs.exceptions.PythonTooOldError
+.. autoexception:: attrs.exceptions.FrozenError
+.. autoexception:: attrs.exceptions.FrozenInstanceError
+.. autoexception:: attrs.exceptions.FrozenAttributeError
+.. autoexception:: attrs.exceptions.AttrsAttributeNotFoundError
+.. autoexception:: attrs.exceptions.NotAnAttrsClassError
+.. autoexception:: attrs.exceptions.DefaultAlreadySetError
+.. autoexception:: attrs.exceptions.UnannotatedAttributeError
+.. autoexception:: attrs.exceptions.NotCallableError
+
+ For example::
+
+ @attr.s(auto_attribs=True)
+ class C:
+ x: int
+ y = attr.ib() # <- ERROR!
+
+
+.. _helpers:
+
+Helpers
+-------
+
+``attrs`` comes with a bunch of helper methods that make working with it easier:
+
+.. autofunction:: attrs.cmp_using
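+
+   For example, a sketch of field equality through ``numpy.array_equal`` (``numpy`` is purely illustrative here)::
+
+      import numpy
+
+      @attrs.define
+      class Data:
+          values = attrs.field(eq=attrs.cmp_using(eq=numpy.array_equal))
+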
+.. function:: attr.cmp_using
+
+ Same as `attrs.cmp_using`.
+
+.. autofunction:: attrs.fields
+
+ For example:
+
+ .. doctest::
+
+ >>> @attr.s
+ ... class C(object):
+ ... x = attr.ib()
+ ... y = attr.ib()
+ >>> attrs.fields(C)
+ (Attribute(name='x', default=NOTHING, validator=None, repr=True, eq=True, eq_key=None, order=True, order_key=None, hash=None, init=True, metadata=mappingproxy({}), type=None, converter=None, kw_only=False, inherited=False, on_setattr=None), Attribute(name='y', default=NOTHING, validator=None, repr=True, eq=True, eq_key=None, order=True, order_key=None, hash=None, init=True, metadata=mappingproxy({}), type=None, converter=None, kw_only=False, inherited=False, on_setattr=None))
+ >>> attrs.fields(C)[1]
+ Attribute(name='y', default=NOTHING, validator=None, repr=True, eq=True, eq_key=None, order=True, order_key=None, hash=None, init=True, metadata=mappingproxy({}), type=None, converter=None, kw_only=False, inherited=False, on_setattr=None)
+ >>> attrs.fields(C).y is attrs.fields(C)[1]
+ True
+
+.. function:: attr.fields
+
+ Same as `attrs.fields`.
+
+.. autofunction:: attrs.fields_dict
+
+ For example:
+
+ .. doctest::
+
+ >>> @attr.s
+ ... class C(object):
+ ... x = attr.ib()
+ ... y = attr.ib()
+ >>> attrs.fields_dict(C)
+ {'x': Attribute(name='x', default=NOTHING, validator=None, repr=True, eq=True, eq_key=None, order=True, order_key=None, hash=None, init=True, metadata=mappingproxy({}), type=None, converter=None, kw_only=False, inherited=False, on_setattr=None), 'y': Attribute(name='y', default=NOTHING, validator=None, repr=True, eq=True, eq_key=None, order=True, order_key=None, hash=None, init=True, metadata=mappingproxy({}), type=None, converter=None, kw_only=False, inherited=False, on_setattr=None)}
+ >>> attr.fields_dict(C)['y']
+ Attribute(name='y', default=NOTHING, validator=None, repr=True, eq=True, eq_key=None, order=True, order_key=None, hash=None, init=True, metadata=mappingproxy({}), type=None, converter=None, kw_only=False, inherited=False, on_setattr=None)
+ >>> attrs.fields_dict(C)['y'] is attrs.fields(C).y
+ True
+
+.. function:: attr.fields_dict
+
+ Same as `attrs.fields_dict`.
+
+.. autofunction:: attrs.has
+
+ For example:
+
+ .. doctest::
+
+ >>> @attr.s
+ ... class C(object):
+ ... pass
+ >>> attr.has(C)
+ True
+ >>> attr.has(object)
+ False
+
+.. function:: attr.has
+
+ Same as `attrs.has`.
+
+.. autofunction:: attrs.resolve_types
+
+ For example:
+
+ .. doctest::
+
+ >>> import typing
+ >>> @attrs.define
+ ... class A:
+ ... a: typing.List['A']
+ ... b: 'B'
+ ...
+ >>> @attrs.define
+ ... class B:
+ ... a: A
+ ...
+ >>> attrs.fields(A).a.type
+ typing.List[ForwardRef('A')]
+ >>> attrs.fields(A).b.type
+ 'B'
+ >>> attrs.resolve_types(A, globals(), locals())
+ <class 'A'>
+ >>> attrs.fields(A).a.type
+ typing.List[A]
+ >>> attrs.fields(A).b.type
+ <class 'B'>
+
+.. function:: attr.resolve_types
+
+ Same as `attrs.resolve_types`.
+
+.. autofunction:: attrs.asdict
+
+ For example:
+
+ .. doctest::
+
+ >>> @attrs.define
+ ... class C:
+ ... x: int
+ ... y: int
+ >>> attrs.asdict(C(1, C(2, 3)))
+ {'x': 1, 'y': {'x': 2, 'y': 3}}
+
+.. autofunction:: attr.asdict
+
+.. autofunction:: attrs.astuple
+
+ For example:
+
+ .. doctest::
+
+ >>> @attrs.define
+ ... class C:
+ ... x = attr.field()
+ ... y = attr.field()
+ >>> attrs.astuple(C(1,2))
+ (1, 2)
+
+.. autofunction:: attr.astuple
+
+
+``attrs`` includes some handy helpers for filtering the attributes in `attrs.asdict` and `attrs.astuple`:
+
+.. autofunction:: attrs.filters.include
+
+.. autofunction:: attrs.filters.exclude
+
+.. function:: attr.filters.include
+
+ Same as `attrs.filters.include`.
+
+.. function:: attr.filters.exclude
+
+ Same as `attrs.filters.exclude`.
+
+See :func:`attrs.asdict` for examples.
+
+All objects from ``attrs.filters`` are also available from ``attr.filters``.
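+
+For example, a brief sketch that drops the ``password`` field and all ``int`` fields (the class is illustrative)::
+
+   @attrs.define
+   class User:
+       login: str
+       password: str
+       id: int
+
+   attrs.asdict(
+       User("jane", "s33kred", 42),
+       filter=attrs.filters.exclude(attrs.fields(User).password, int),
+   )
+   # {'login': 'jane'}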
+
+----
+
+.. autofunction:: attrs.evolve
+
+ For example:
+
+ .. doctest::
+
+ >>> @attrs.define
+ ... class C:
+ ... x: int
+ ... y: int
+ >>> i1 = C(1, 2)
+ >>> i1
+ C(x=1, y=2)
+ >>> i2 = attrs.evolve(i1, y=3)
+ >>> i2
+ C(x=1, y=3)
+ >>> i1 == i2
+ False
+
+ ``evolve`` creates a new instance using ``__init__``.
+ This fact has several implications:
+
+ * private attributes should be specified without the leading underscore, just like in ``__init__``.
+ * attributes with ``init=False`` can't be set with ``evolve``.
+ * the usual ``__init__`` validators will validate the new values.
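+
+   For example, a sketch with a private attribute (the names are illustrative)::
+
+      @attrs.define
+      class Connection:
+          _token: str
+
+      c1 = Connection(token="abc")        # underscore is stripped in __init__ ...
+      c2 = attrs.evolve(c1, token="xyz")  # ... and likewise in evolve()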
+
+.. function:: attr.evolve
+
+ Same as `attrs.evolve`.
+
+.. autofunction:: attrs.validate
+
+ For example:
+
+ .. doctest::
+
+ >>> @attrs.define(on_setattr=attrs.setters.NO_OP)
+ ... class C:
+ ... x = attrs.field(validator=attrs.validators.instance_of(int))
+ >>> i = C(1)
+ >>> i.x = "1"
+ >>> attrs.validate(i)
+ Traceback (most recent call last):
+ ...
+ TypeError: ("'x' must be <class 'int'> (got '1' that is a <class 'str'>).", ...)
+
+.. function:: attr.validate
+
+ Same as `attrs.validate`.
+
+
+Validators can be globally disabled if you want to run them only in development and tests but not in production because you fear their performance impact:
+
+.. autofunction:: set_run_validators
+
+.. autofunction:: get_run_validators
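+
+For example, a quick sketch of toggling them globally::
+
+   import attr
+
+   attr.set_run_validators(False)          # turn all validators off
+   assert attr.get_run_validators() is False
+   attr.set_run_validators(True)           # and back on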
+
+
+.. _api_validators:
+
+Validators
+----------
+
+``attrs`` comes with some common validators in the ``attrs.validators`` module.
+All objects from ``attrs.validators`` are also available from ``attr.validators``.
+
+
+.. autofunction:: attrs.validators.lt
+
+ For example:
+
+ .. doctest::
+
+ >>> @attrs.define
+ ... class C:
+ ... x = attrs.field(validator=attrs.validators.lt(42))
+ >>> C(41)
+ C(x=41)
+ >>> C(42)
+ Traceback (most recent call last):
+ ...
+ ValueError: ("'x' must be < 42: 42")
+
+.. autofunction:: attrs.validators.le
+
+ For example:
+
+ .. doctest::
+
+ >>> @attrs.define
+ ... class C(object):
+ ... x = attrs.field(validator=attr.validators.le(42))
+ >>> C(42)
+ C(x=42)
+ >>> C(43)
+ Traceback (most recent call last):
+ ...
+ ValueError: ("'x' must be <= 42: 43")
+
+.. autofunction:: attrs.validators.ge
+
+ For example:
+
+ .. doctest::
+
+ >>> @attrs.define
+ ... class C:
+ ... x = attrs.field(validator=attrs.validators.ge(42))
+ >>> C(42)
+ C(x=42)
+ >>> C(41)
+ Traceback (most recent call last):
+ ...
+   ValueError: ("'x' must be >= 42: 41")
+
+.. autofunction:: attrs.validators.gt
+
+ For example:
+
+ .. doctest::
+
+ >>> @attrs.define
+ ... class C:
+ ... x = attr.field(validator=attrs.validators.gt(42))
+ >>> C(43)
+ C(x=43)
+ >>> C(42)
+ Traceback (most recent call last):
+ ...
+ ValueError: ("'x' must be > 42: 42")
+
+.. autofunction:: attrs.validators.max_len
+
+ For example:
+
+ .. doctest::
+
+ >>> @attrs.define
+ ... class C:
+ ... x = attrs.field(validator=attrs.validators.max_len(4))
+ >>> C("spam")
+ C(x='spam')
+ >>> C("bacon")
+ Traceback (most recent call last):
+ ...
+ ValueError: ("Length of 'x' must be <= 4: 5")
+
+.. autofunction:: attrs.validators.instance_of
+
+ For example:
+
+ .. doctest::
+
+ >>> @attrs.define
+ ... class C:
+ ... x = attrs.field(validator=attrs.validators.instance_of(int))
+ >>> C(42)
+ C(x=42)
+ >>> C("42")
+ Traceback (most recent call last):
+ ...
+ TypeError: ("'x' must be <type 'int'> (got '42' that is a <type 'str'>).", Attribute(name='x', default=NOTHING, validator=<instance_of validator for type <type 'int'>>, type=None, kw_only=False), <type 'int'>, '42')
+ >>> C(None)
+ Traceback (most recent call last):
+ ...
+ TypeError: ("'x' must be <type 'int'> (got None that is a <type 'NoneType'>).", Attribute(name='x', default=NOTHING, validator=<instance_of validator for type <type 'int'>>, repr=True, cmp=True, hash=None, init=True, type=None, kw_only=False), <type 'int'>, None)
+
+.. autofunction:: attrs.validators.in_
+
+ For example:
+
+ .. doctest::
+
+ >>> import enum
+ >>> class State(enum.Enum):
+ ... ON = "on"
+ ... OFF = "off"
+ >>> @attrs.define
+ ... class C:
+ ... state = attrs.field(validator=attrs.validators.in_(State))
+ ... val = attrs.field(validator=attrs.validators.in_([1, 2, 3]))
+ >>> C(State.ON, 1)
+ C(state=<State.ON: 'on'>, val=1)
+ >>> C("on", 1)
+ Traceback (most recent call last):
+ ...
+ ValueError: 'state' must be in <enum 'State'> (got 'on')
+ >>> C(State.ON, 4)
+ Traceback (most recent call last):
+ ...
+ ValueError: 'val' must be in [1, 2, 3] (got 4)
+
+.. autofunction:: attrs.validators.provides
+
+.. autofunction:: attrs.validators.and_
+
+ For convenience, it's also possible to pass a list to `attrs.field`'s validator argument.
+
+ Thus the following two statements are equivalent::
+
+ x = attrs.field(validator=attrs.validators.and_(v1, v2, v3))
+ x = attrs.field(validator=[v1, v2, v3])
+
+.. autofunction:: attrs.validators.optional
+
+ For example:
+
+ .. doctest::
+
+ >>> @attrs.define
+ ... class C:
+ ... x = attrs.field(validator=attrs.validators.optional(attr.validators.instance_of(int)))
+ >>> C(42)
+ C(x=42)
+ >>> C("42")
+ Traceback (most recent call last):
+ ...
+ TypeError: ("'x' must be <type 'int'> (got '42' that is a <type 'str'>).", Attribute(name='x', default=NOTHING, validator=<instance_of validator for type <type 'int'>>, type=None, kw_only=False), <type 'int'>, '42')
+ >>> C(None)
+ C(x=None)
+
+
+.. autofunction:: attrs.validators.is_callable
+
+ For example:
+
+ .. doctest::
+
+ >>> @attrs.define
+ ... class C:
+ ... x = attrs.field(validator=attrs.validators.is_callable())
+ >>> C(isinstance)
+ C(x=<built-in function isinstance>)
+ >>> C("not a callable")
+ Traceback (most recent call last):
+ ...
+ attr.exceptions.NotCallableError: 'x' must be callable (got 'not a callable' that is a <class 'str'>).
+
+
+.. autofunction:: attrs.validators.matches_re
+
+ For example:
+
+ .. doctest::
+
+ >>> @attrs.define
+ ... class User:
+ ... email = attrs.field(validator=attrs.validators.matches_re(
+ ... "(^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+$)"))
+ >>> User(email="user@example.com")
+ User(email='user@example.com')
+ >>> User(email="user@example.com@test.com")
+ Traceback (most recent call last):
+ ...
+ ValueError: ("'email' must match regex '(^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\\\\.[a-zA-Z0-9-.]+$)' ('user@example.com@test.com' doesn't)", Attribute(name='email', default=NOTHING, validator=<matches_re validator for pattern re.compile('(^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\\.[a-zA-Z0-9-.]+$)')>, repr=True, cmp=True, hash=None, init=True, metadata=mappingproxy({}), type=None, converter=None, kw_only=False), re.compile('(^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\\.[a-zA-Z0-9-.]+$)'), 'user@example.com@test.com')
+
+
+.. autofunction:: attrs.validators.deep_iterable
+
+ For example:
+
+ .. doctest::
+
+ >>> @attrs.define
+ ... class C:
+ ... x = attrs.field(validator=attrs.validators.deep_iterable(
+ ... member_validator=attrs.validators.instance_of(int),
+ ... iterable_validator=attrs.validators.instance_of(list)
+ ... ))
+ >>> C(x=[1, 2, 3])
+ C(x=[1, 2, 3])
+ >>> C(x=set([1, 2, 3]))
+ Traceback (most recent call last):
+ ...
+ TypeError: ("'x' must be <class 'list'> (got {1, 2, 3} that is a <class 'set'>).", Attribute(name='x', default=NOTHING, validator=<deep_iterable validator for <instance_of validator for type <class 'list'>> iterables of <instance_of validator for type <class 'int'>>>, repr=True, cmp=True, hash=None, init=True, metadata=mappingproxy({}), type=None, converter=None, kw_only=False), <class 'list'>, {1, 2, 3})
+ >>> C(x=[1, 2, "3"])
+ Traceback (most recent call last):
+ ...
+ TypeError: ("'x' must be <class 'int'> (got '3' that is a <class 'str'>).", Attribute(name='x', default=NOTHING, validator=<deep_iterable validator for <instance_of validator for type <class 'list'>> iterables of <instance_of validator for type <class 'int'>>>, repr=True, cmp=True, hash=None, init=True, metadata=mappingproxy({}), type=None, converter=None, kw_only=False), <class 'int'>, '3')
+
+
+.. autofunction:: attrs.validators.deep_mapping
+
+ For example:
+
+ .. doctest::
+
+ >>> @attrs.define
+ ... class C:
+ ... x = attrs.field(validator=attrs.validators.deep_mapping(
+ ... key_validator=attrs.validators.instance_of(str),
+ ... value_validator=attrs.validators.instance_of(int),
+ ... mapping_validator=attrs.validators.instance_of(dict)
+ ... ))
+ >>> C(x={"a": 1, "b": 2})
+ C(x={'a': 1, 'b': 2})
+ >>> C(x=None)
+ Traceback (most recent call last):
+ ...
+ TypeError: ("'x' must be <class 'dict'> (got None that is a <class 'NoneType'>).", Attribute(name='x', default=NOTHING, validator=<deep_mapping validator for objects mapping <instance_of validator for type <class 'str'>> to <instance_of validator for type <class 'int'>>>, repr=True, cmp=True, hash=None, init=True, metadata=mappingproxy({}), type=None, converter=None, kw_only=False), <class 'dict'>, None)
+ >>> C(x={"a": 1.0, "b": 2})
+ Traceback (most recent call last):
+ ...
+ TypeError: ("'x' must be <class 'int'> (got 1.0 that is a <class 'float'>).", Attribute(name='x', default=NOTHING, validator=<deep_mapping validator for objects mapping <instance_of validator for type <class 'str'>> to <instance_of validator for type <class 'int'>>>, repr=True, cmp=True, hash=None, init=True, metadata=mappingproxy({}), type=None, converter=None, kw_only=False), <class 'int'>, 1.0)
+ >>> C(x={"a": 1, 7: 2})
+ Traceback (most recent call last):
+ ...
+ TypeError: ("'x' must be <class 'str'> (got 7 that is a <class 'int'>).", Attribute(name='x', default=NOTHING, validator=<deep_mapping validator for objects mapping <instance_of validator for type <class 'str'>> to <instance_of validator for type <class 'int'>>>, repr=True, cmp=True, hash=None, init=True, metadata=mappingproxy({}), type=None, converter=None, kw_only=False), <class 'str'>, 7)
+
+Validators can be both globally and locally disabled:
+
+.. autofunction:: attrs.validators.set_disabled
+
+.. autofunction:: attrs.validators.get_disabled
+
+.. autofunction:: attrs.validators.disabled
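+
+For example, a brief sketch of both styles (``UserClass`` is illustrative)::
+
+   attrs.validators.set_disabled(True)     # global switch
+   assert attrs.validators.get_disabled() is True
+   attrs.validators.set_disabled(False)
+
+   with attrs.validators.disabled():       # scoped to this block
+       UserClass("a value that would normally fail validation")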
+
+
+Converters
+----------
+
+All objects from ``attrs.converters`` are also available from ``attr.converters``.
+
+.. autofunction:: attrs.converters.pipe
+
+ For convenience, it's also possible to pass a list to `attr.ib`'s converter argument.
+
+ Thus the following two statements are equivalent::
+
+      x = attr.ib(converter=attr.converters.pipe(c1, c2, c3))
+ x = attr.ib(converter=[c1, c2, c3])
+
+.. autofunction:: attrs.converters.optional
+
+ For example:
+
+ .. doctest::
+
+ >>> @attr.s
+ ... class C(object):
+ ... x = attr.ib(converter=attr.converters.optional(int))
+ >>> C(None)
+ C(x=None)
+ >>> C(42)
+ C(x=42)
+
+
+.. autofunction:: attrs.converters.default_if_none
+
+ For example:
+
+ .. doctest::
+
+ >>> @attr.s
+ ... class C(object):
+ ... x = attr.ib(
+ ... converter=attr.converters.default_if_none("")
+ ... )
+ >>> C(None)
+ C(x='')
+
+
+.. autofunction:: attrs.converters.to_bool
+
+ For example:
+
+ .. doctest::
+
+ >>> @attr.s
+ ... class C(object):
+ ... x = attr.ib(
+ ... converter=attr.converters.to_bool
+ ... )
+ >>> C("yes")
+ C(x=True)
+ >>> C(0)
+ C(x=False)
+ >>> C("foo")
+ Traceback (most recent call last):
+ File "<stdin>", line 1, in <module>
+ ValueError: Cannot convert value to bool: foo
+
+
+
+.. _api_setters:
+
+Setters
+-------
+
+These are helpers that you can use together with `attrs.define`'s and `attrs.field`'s ``on_setattr`` arguments.
+All setters in ``attrs.setters`` are also available from ``attr.setters``.
+
+.. autofunction:: attrs.setters.frozen
+.. autofunction:: attrs.setters.validate
+.. autofunction:: attrs.setters.convert
+.. autofunction:: attrs.setters.pipe
+.. autodata:: attrs.setters.NO_OP
+
+ For example, only ``x`` is frozen here:
+
+ .. doctest::
+
+ >>> @attrs.define(on_setattr=attr.setters.frozen)
+ ... class C:
+ ... x = attr.field()
+ ... y = attr.field(on_setattr=attr.setters.NO_OP)
+ >>> c = C(1, 2)
+ >>> c.y = 3
+ >>> c.y
+ 3
+ >>> c.x = 4
+ Traceback (most recent call last):
+ ...
+ attrs.exceptions.FrozenAttributeError: ()
+
+ N.B. Please use `attrs.define`'s *frozen* argument (or `attrs.frozen`) to freeze whole classes; it is more efficient.
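+
+For example, a sketch of re-running conversion and validation on every assignment; this is roughly what `attrs.define` already wires up by default::
+
+   @attr.s
+   class C:
+       x = attr.ib(
+           converter=int,
+           validator=attr.validators.instance_of(int),
+           on_setattr=attr.setters.pipe(
+               attr.setters.convert, attr.setters.validate
+           ),
+       )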
+
+
+Deprecated APIs
+---------------
+
+.. _version-info:
+
+To help you write backward compatible code that doesn't throw warnings on modern releases, the ``attr`` module has an ``__version_info__`` attribute as of version 19.2.0.
+It behaves similarly to `sys.version_info` and is an instance of `VersionInfo`:
+
+.. autoclass:: VersionInfo
+
+ With its help you can write code like this:
+
+ >>> if getattr(attr, "__version_info__", (0,)) >= (19, 2):
+ ... cmp_off = {"eq": False}
+ ... else:
+ ... cmp_off = {"cmp": False}
+ >>> cmp_off == {"eq": False}
+ True
+ >>> @attr.s(**cmp_off)
+ ... class C(object):
+ ... pass
+
+
+----
+
+The serious business aliases used to be called ``attr.attributes`` and ``attr.attr``.
+There are no plans to remove them but they shouldn't be used in new code.
+
+.. autofunction:: assoc
diff --git a/testing/web-platform/tests/tools/third_party/attrs/docs/changelog.rst b/testing/web-platform/tests/tools/third_party/attrs/docs/changelog.rst
new file mode 100644
index 0000000000..565b0521d0
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/docs/changelog.rst
@@ -0,0 +1 @@
+.. include:: ../CHANGELOG.rst
diff --git a/testing/web-platform/tests/tools/third_party/attrs/docs/comparison.rst b/testing/web-platform/tests/tools/third_party/attrs/docs/comparison.rst
new file mode 100644
index 0000000000..760124ca3b
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/docs/comparison.rst
@@ -0,0 +1,66 @@
+Comparison
+==========
+
+By default, two instances of ``attrs`` classes are equal if all their fields are equal.
+For that, ``attrs`` writes ``__eq__`` and ``__ne__`` methods for you.
+
+Additionally, if you pass ``order=True`` (which is the default if you use the `attr.s` decorator), ``attrs`` will also create a full set of ordering methods that are based on the defined fields: ``__le__``, ``__lt__``, ``__ge__``, and ``__gt__``.
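+
+For example, a small sketch of the default behavior (the class name is illustrative)::
+
+   import attr
+
+   @attr.s(auto_attribs=True)
+   class Point:
+       x: int
+       y: int
+
+   assert Point(1, 2) == Point(1, 2)   # field-by-field equality
+   assert Point(1, 2) < Point(1, 3)    # ordering methods from order=True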
+
+
+.. _custom-comparison:
+
+Customization
+-------------
+
+As with other features, you can exclude fields from being involved in comparison operations:
+
+.. doctest::
+
+ >>> from attr import define, field
+
+ >>> @define
+ ... class C:
+ ... x: int
+ ... y: int = field(eq=False)
+
+ >>> C(1, 2) == C(1, 3)
+ True
+
+Additionally, you can pass a *callable* instead of a bool to both *eq* and *order*.
+It is then used as a key function like you may know from `sorted`:
+
+.. doctest::
+
+ >>> from attr import define, field
+
+ >>> @define
+ ... class S:
+ ... x: str = field(eq=str.lower)
+
+ >>> S("foo") == S("FOO")
+ True
+
+ >>> @define(order=True)
+ ... class C:
+ ... x: str = field(order=int)
+
+ >>> C("10") > C("2")
+ True
+
+This is especially useful when you have fields with objects that have atypical comparison properties.
+Common examples of such objects are `NumPy arrays <https://github.com/python-attrs/attrs/issues/435>`_.
+
+To save you unnecessary boilerplate, ``attrs`` comes with the `attr.cmp_using` helper to create such functions.
+For NumPy arrays it would look like this::
+
+ import numpy
+
+ @define(order=False)
+ class C:
+ an_array = field(eq=attr.cmp_using(eq=numpy.array_equal))
+
+
+.. warning::
+
+ Please note that *eq* and *order* are set *independently*, because *order* is `False` by default in `attrs.define` (but not in `attr.s`).
+ You can set both at once by using the *cmp* argument that we've undeprecated just for this use-case.
diff --git a/testing/web-platform/tests/tools/third_party/attrs/docs/conf.py b/testing/web-platform/tests/tools/third_party/attrs/docs/conf.py
new file mode 100644
index 0000000000..0cc80be6a6
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/docs/conf.py
@@ -0,0 +1,155 @@
+# SPDX-License-Identifier: MIT
+
+from importlib import metadata
+
+
+# -- General configuration ------------------------------------------------
+
+doctest_global_setup = """
+from attr import define, frozen, field, validators, Factory
+"""
+
+linkcheck_ignore = [
+ # We run into GitHub's rate limits.
+ r"https://github.com/.*/(issues|pull)/\d+",
+ # It never finds the anchor even though it's there.
+ "https://github.com/microsoft/pyright/blob/main/specs/"
+ "dataclass_transforms.md#attrs",
+]
+
+# In nitpick mode (-n), still ignore any of the following "broken" references
+# to non-types.
+nitpick_ignore = [
+ ("py:class", "Any value"),
+ ("py:class", "callable"),
+ ("py:class", "callables"),
+ ("py:class", "tuple of types"),
+]
+
+# Add any Sphinx extension module names here, as strings. They can be
+# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
+# ones.
+extensions = [
+ "sphinx.ext.autodoc",
+ "sphinx.ext.doctest",
+ "sphinx.ext.intersphinx",
+ "sphinx.ext.todo",
+ "notfound.extension",
+]
+
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ["_templates"]
+
+# The suffix of source filenames.
+source_suffix = ".rst"
+
+# The master toctree document.
+master_doc = "index"
+
+# General information about the project.
+project = "attrs"
+author = "Hynek Schlawack"
+copyright = f"2015, {author}"
+
+# The version info for the project you're documenting, acts as replacement for
+# |version| and |release|, also used in various other places throughout the
+# built documents.
+
+# The full version, including alpha/beta/rc tags.
+release = metadata.version("attrs")
+# The short X.Y version.
+version = release.rsplit(".", 1)[0]
+
+# List of patterns, relative to source directory, that match files and
+# directories to ignore when looking for source files.
+exclude_patterns = ["_build"]
+
+# The reST default role (used for this markup: `text`) to use for all
+# documents.
+default_role = "any"
+
+# If true, '()' will be appended to :func: etc. cross-reference text.
+add_function_parentheses = True
+
+# -- Options for HTML output ----------------------------------------------
+
+# The theme to use for HTML and HTML Help pages. See the documentation for
+# a list of builtin themes.
+
+html_theme = "furo"
+html_theme_options = {
+ "sidebar_hide_name": True,
+ "light_logo": "attrs_logo.svg",
+ "dark_logo": "attrs_logo_white.svg",
+}
+
+
+# The name of an image file (within the static path) to use as favicon of the
+# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
+# pixels large.
+# html_favicon = None
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+html_static_path = ["_static"]
+
+# If false, no module index is generated.
+html_domain_indices = True
+
+# If false, no index is generated.
+html_use_index = True
+
+# If true, the index is split into individual pages for each letter.
+html_split_index = False
+
+# If true, links to the reST sources are added to the pages.
+html_show_sourcelink = False
+
+# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
+html_show_sphinx = True
+
+# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
+html_show_copyright = True
+
+# If true, an OpenSearch description file will be output, and all pages will
+# contain a <link> tag referring to it. The value of this option must be the
+# base URL from which the finished HTML is served.
+# html_use_opensearch = ''
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = "attrsdoc"
+
+# -- Options for manual page output ---------------------------------------
+
+# One entry per manual page. List of tuples
+# (source start file, name, description, authors, manual section).
+man_pages = [("index", "attrs", "attrs Documentation", ["Hynek Schlawack"], 1)]
+
+
+# -- Options for Texinfo output -------------------------------------------
+
+# Grouping the document tree into Texinfo files. List of tuples
+# (source start file, target name, title, author,
+# dir menu entry, description, category)
+texinfo_documents = [
+ (
+ "index",
+ "attrs",
+ "attrs Documentation",
+ "Hynek Schlawack",
+ "attrs",
+        "Python Classes Without Boilerplate",
+ "Miscellaneous",
+ )
+]
+
+epub_description = "Python Classes Without Boilerplate"
+
+intersphinx_mapping = {
+ "https://docs.python.org/3": None,
+}
+
+# Allow non-local URIs so we can have images in CHANGELOG etc.
+suppress_warnings = ["image.nonlocal_uri"]
diff --git a/testing/web-platform/tests/tools/third_party/attrs/docs/docutils.conf b/testing/web-platform/tests/tools/third_party/attrs/docs/docutils.conf
new file mode 100644
index 0000000000..db8ca82c74
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/docs/docutils.conf
@@ -0,0 +1,3 @@
+[parsers]
+[restructuredtext parser]
+smart_quotes=yes
diff --git a/testing/web-platform/tests/tools/third_party/attrs/docs/examples.rst b/testing/web-platform/tests/tools/third_party/attrs/docs/examples.rst
new file mode 100644
index 0000000000..ba5343d4ad
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/docs/examples.rst
@@ -0,0 +1,709 @@
+``attrs`` by Example
+====================
+
+
+Basics
+------
+
+The simplest possible usage is:
+
+.. doctest::
+
+ >>> from attrs import define
+ >>> @define
+ ... class Empty:
+ ... pass
+ >>> Empty()
+ Empty()
+ >>> Empty() == Empty()
+ True
+ >>> Empty() is Empty()
+ False
+
+So in other words: ``attrs`` is useful even without actual attributes!
+
+But you'll usually want some data on your classes, so let's add some:
+
+.. doctest::
+
+ >>> @define
+ ... class Coordinates:
+ ... x: int
+ ... y: int
+
+By default, all features are added, so you immediately have a fully functional data class with a nice ``repr`` string and comparison methods.
+
+.. doctest::
+
+ >>> c1 = Coordinates(1, 2)
+ >>> c1
+ Coordinates(x=1, y=2)
+ >>> c2 = Coordinates(x=2, y=1)
+ >>> c2
+ Coordinates(x=2, y=1)
+ >>> c1 == c2
+ False
+
+As shown, the generated ``__init__`` method allows for both positional and keyword arguments.
+
+For private attributes, ``attrs`` will strip the leading underscores for keyword arguments:
+
+.. doctest::
+
+ >>> @define
+ ... class C:
+ ... _x: int
+ >>> C(x=1)
+ C(_x=1)
+
+If you want to initialize your private attributes yourself, you can do that too:
+
+.. doctest::
+
+ >>> @define
+ ... class C:
+ ... _x: int = field(init=False, default=42)
+ >>> C()
+ C(_x=42)
+ >>> C(23)
+ Traceback (most recent call last):
+ ...
+ TypeError: __init__() takes exactly 1 argument (2 given)
+
+An additional way of defining attributes is supported too.
+This is useful when you want to enhance classes that are not yours (nice ``__repr__`` for Django models, anyone?):
+
+.. doctest::
+
+ >>> class SomethingFromSomeoneElse:
+ ... def __init__(self, x):
+ ... self.x = x
+ >>> SomethingFromSomeoneElse = define(
+ ... these={
+ ... "x": field()
+ ... }, init=False)(SomethingFromSomeoneElse)
+ >>> SomethingFromSomeoneElse(1)
+ SomethingFromSomeoneElse(x=1)
+
+
+`Subclassing is bad for you <https://www.youtube.com/watch?v=3MNVP9-hglc>`_, but ``attrs`` will still do what you'd hope for:
+
+.. doctest::
+
+ >>> @define(slots=False)
+ ... class A:
+ ... a: int
+ ... def get_a(self):
+ ... return self.a
+ >>> @define(slots=False)
+ ... class B:
+ ... b: int
+ >>> @define(slots=False)
+ ... class C(B, A):
+ ... c: int
+ >>> i = C(1, 2, 3)
+ >>> i
+ C(a=1, b=2, c=3)
+ >>> i == C(1, 2, 3)
+ True
+ >>> i.get_a()
+ 1
+
+:term:`Slotted classes <slotted classes>`, which are the default for the new APIs, don't play well with multiple inheritance, so we don't use them in the example.
+
+The order of the attributes is defined by the `MRO <https://www.python.org/download/releases/2.3/mro/>`_.
+
+Keyword-only Attributes
+~~~~~~~~~~~~~~~~~~~~~~~
+
+You can also add `keyword-only <https://docs.python.org/3/glossary.html#keyword-only-parameter>`_ attributes:
+
+.. doctest::
+
+ >>> @define
+ ... class A:
+ ... a: int = field(kw_only=True)
+ >>> A()
+ Traceback (most recent call last):
+ ...
+ TypeError: A() missing 1 required keyword-only argument: 'a'
+ >>> A(a=1)
+ A(a=1)
+
+``kw_only`` may also be specified via ``define``, and will then apply to all attributes:
+
+.. doctest::
+
+ >>> @define(kw_only=True)
+ ... class A:
+ ... a: int
+ ... b: int
+ >>> A(1, 2)
+ Traceback (most recent call last):
+ ...
+ TypeError: __init__() takes 1 positional argument but 3 were given
+ >>> A(a=1, b=2)
+ A(a=1, b=2)
+
+
+
+If you create an attribute with ``init=False``, the ``kw_only`` argument is ignored.
+
+Keyword-only attributes allow subclasses to add attributes without default values, even if the base class defines attributes with default values:
+
+.. doctest::
+
+ >>> @define
+ ... class A:
+ ... a: int = 0
+ >>> @define
+ ... class B(A):
+ ... b: int = field(kw_only=True)
+ >>> B(b=1)
+ B(a=0, b=1)
+ >>> B()
+ Traceback (most recent call last):
+ ...
+ TypeError: B() missing 1 required keyword-only argument: 'b'
+
+If you don't set ``kw_only=True``, then there is no valid attribute ordering and you'll get an error:
+
+.. doctest::
+
+ >>> @define
+ ... class A:
+ ... a: int = 0
+ >>> @define
+ ... class B(A):
+ ... b: int
+ Traceback (most recent call last):
+ ...
+ ValueError: No mandatory attributes allowed after an attribute with a default value or factory. Attribute in question: Attribute(name='b', default=NOTHING, validator=None, repr=True, cmp=True, hash=None, init=True, converter=None, metadata=mappingproxy({}), type=int, kw_only=False)
+
+.. _asdict:
+
+Converting to Collections Types
+-------------------------------
+
+When you have a class with data, it often is very convenient to transform that class into a `dict` (for example if you want to serialize it to JSON):
+
+.. doctest::
+
+ >>> from attrs import asdict
+
+ >>> asdict(Coordinates(x=1, y=2))
+ {'x': 1, 'y': 2}
+
+Some fields cannot or should not be transformed.
+For that, `attrs.asdict` offers a callback that decides whether an attribute should be included:
+
+.. doctest::
+
+ >>> @define
+ ... class User(object):
+ ... email: str
+ ... password: str
+
+ >>> @define
+ ... class UserList:
+ ... users: list[User]
+
+ >>> asdict(UserList([User("jane@doe.invalid", "s33kred"),
+ ... User("joe@doe.invalid", "p4ssw0rd")]),
+ ... filter=lambda attr, value: attr.name != "password")
+ {'users': [{'email': 'jane@doe.invalid'}, {'email': 'joe@doe.invalid'}]}
+
+For the common case where you want to `include <attr.filters.include>` or `exclude <attr.filters.exclude>` certain types or attributes, ``attrs`` ships with a few helpers:
+
+.. doctest::
+
+ >>> from attrs import asdict, filters, fields
+
+ >>> @define
+ ... class User:
+ ... login: str
+ ... password: str
+ ... id: int
+
+ >>> asdict(
+ ... User("jane", "s33kred", 42),
+ ... filter=filters.exclude(fields(User).password, int))
+ {'login': 'jane'}
+
+ >>> @define
+ ... class C:
+ ... x: str
+ ... y: str
+ ... z: int
+
+ >>> asdict(C("foo", "2", 3),
+ ... filter=filters.include(int, fields(C).x))
+ {'x': 'foo', 'z': 3}
+
+Other times, all you want is a tuple and ``attrs`` won't let you down:
+
+.. doctest::
+
+ >>> import sqlite3
+ >>> from attrs import astuple
+
+ >>> @define
+ ... class Foo:
+ ... a: int
+ ... b: int
+
+ >>> foo = Foo(2, 3)
+ >>> with sqlite3.connect(":memory:") as conn:
+ ... c = conn.cursor()
+ ... c.execute("CREATE TABLE foo (x INTEGER PRIMARY KEY ASC, y)") #doctest: +ELLIPSIS
+ ... c.execute("INSERT INTO foo VALUES (?, ?)", astuple(foo)) #doctest: +ELLIPSIS
+ ... foo2 = Foo(*c.execute("SELECT x, y FROM foo").fetchone())
+ <sqlite3.Cursor object at ...>
+ <sqlite3.Cursor object at ...>
+ >>> foo == foo2
+ True
+
+For more advanced transformations and conversions, we recommend you look at a companion library (such as `cattrs <https://github.com/python-attrs/cattrs>`_).
+
+Defaults
+--------
+
+Sometimes you want to have default values for your initializer.
+And sometimes you even want mutable objects as default values (ever accidentally used ``def f(arg=[])``?).
+``attrs`` has you covered in both cases:
+
+.. doctest::
+
+ >>> import collections
+
+ >>> @define
+ ... class Connection:
+ ... socket: int
+ ... @classmethod
+ ... def connect(cls, db_string):
+ ... # ... connect somehow to db_string ...
+ ... return cls(socket=42)
+
+ >>> @define
+ ... class ConnectionPool:
+ ... db_string: str
+ ... pool: collections.deque = Factory(collections.deque)
+ ... debug: bool = False
+ ... def get_connection(self):
+ ... try:
+ ... return self.pool.pop()
+ ... except IndexError:
+ ... if self.debug:
+ ... print("New connection!")
+ ... return Connection.connect(self.db_string)
+ ... def free_connection(self, conn):
+ ... if self.debug:
+ ... print("Connection returned!")
+ ... self.pool.appendleft(conn)
+ ...
+ >>> cp = ConnectionPool("postgres://localhost")
+ >>> cp
+ ConnectionPool(db_string='postgres://localhost', pool=deque([]), debug=False)
+ >>> conn = cp.get_connection()
+ >>> conn
+ Connection(socket=42)
+ >>> cp.free_connection(conn)
+ >>> cp
+ ConnectionPool(db_string='postgres://localhost', pool=deque([Connection(socket=42)]), debug=False)
+
+More information on why class methods for constructing objects are awesome can be found in this insightful `blog post <https://web.archive.org/web/20210130220433/http://as.ynchrono.us/2014/12/asynchronous-object-initialization.html>`_.
+
+Default factories can also be set using the ``factory`` argument to ``field``, and using a decorator.
+The method receives the partially initialized instance, which enables you to base a default value on other attributes:
+
+.. doctest::
+
+ >>> @define
+ ... class C:
+ ... x: int = 1
+ ... y: int = field()
+ ... @y.default
+ ... def _any_name_except_a_name_of_an_attribute(self):
+ ... return self.x + 1
+ ... z: list = field(factory=list)
+ >>> C()
+ C(x=1, y=2, z=[])
+
+
+.. _examples_validators:
+
+Validators
+----------
+
+Although your initializers should do as little as possible (ideally: just initialize your instance according to the arguments!), it can come in handy to do some kind of validation on the arguments.
+
+``attrs`` offers two ways to define validators for each attribute and it's up to you to choose which one suits your style and project better.
+
+You can use a decorator:
+
+.. doctest::
+
+ >>> @define
+ ... class C:
+ ... x: int = field()
+ ... @x.validator
+ ... def check(self, attribute, value):
+ ... if value > 42:
+ ... raise ValueError("x must be smaller or equal to 42")
+ >>> C(42)
+ C(x=42)
+ >>> C(43)
+ Traceback (most recent call last):
+ ...
+ ValueError: x must be smaller or equal to 42
+
+...or a callable...
+
+.. doctest::
+
+ >>> from attrs import validators
+
+ >>> def x_smaller_than_y(instance, attribute, value):
+ ... if value >= instance.y:
+ ... raise ValueError("'x' has to be smaller than 'y'!")
+ >>> @define
+ ... class C:
+ ... x: int = field(validator=[validators.instance_of(int),
+ ... x_smaller_than_y])
+ ... y: int
+ >>> C(x=3, y=4)
+ C(x=3, y=4)
+ >>> C(x=4, y=3)
+ Traceback (most recent call last):
+ ...
+ ValueError: 'x' has to be smaller than 'y'!
+
+...or both at once:
+
+.. doctest::
+
+ >>> @define
+ ... class C:
+ ... x: int = field(validator=validators.instance_of(int))
+ ... @x.validator
+ ... def fits_byte(self, attribute, value):
+ ... if not 0 <= value < 256:
+ ... raise ValueError("value out of bounds")
+ >>> C(128)
+ C(x=128)
+ >>> C("128")
+ Traceback (most recent call last):
+ ...
+ TypeError: ("'x' must be <class 'int'> (got '128' that is a <class 'str'>).", Attribute(name='x', default=NOTHING, validator=[<instance_of validator for type <class 'int'>>, <function fits_byte at 0x10fd7a0d0>], repr=True, cmp=True, hash=True, init=True, metadata=mappingproxy({}), type=int, converter=None, kw_only=False), <class 'int'>, '128')
+ >>> C(256)
+ Traceback (most recent call last):
+ ...
+ ValueError: value out of bounds
+
+Please note that the decorator approach only works if -- and only if! -- the attribute in question has a ``field`` assigned.
+Therefore if you use ``@default``, it is *not* enough to annotate said attribute with a type.
+
+``attrs`` ships with a bunch of validators; make sure to `check them out <api_validators>` before writing your own:
+
+.. doctest::
+
+ >>> @define
+ ... class C:
+ ... x: int = field(validator=validators.instance_of(int))
+ >>> C(42)
+ C(x=42)
+ >>> C("42")
+ Traceback (most recent call last):
+ ...
+ TypeError: ("'x' must be <type 'int'> (got '42' that is a <type 'str'>).", Attribute(name='x', default=NOTHING, factory=NOTHING, validator=<instance_of validator for type <type 'int'>>, type=None, kw_only=False), <type 'int'>, '42')
+
+Please note that if you use `attr.s` (and not `attrs.define`) to define your class, validators only run on initialization by default.
+This behavior can be changed using the ``on_setattr`` argument.
+
+Check out `validators` for more details.
+
+
+Conversion
+----------
+
+Attributes can have a ``converter`` function specified, which will be called with the attribute's passed-in value to get a new value to use.
+This can be useful for doing type-conversions on values that you don't want to force your callers to do.
+
+.. doctest::
+
+ >>> @define
+ ... class C:
+ ... x: int = field(converter=int)
+ >>> o = C("1")
+ >>> o.x
+ 1
+
+Please note that converters only run on initialization.
+
+Check out `converters` for more details.
+
+
+.. _metadata:
+
+Metadata
+--------
+
+All ``attrs`` attributes may include arbitrary metadata in the form of a read-only dictionary.
+
+.. doctest::
+
+ >>> from attrs import fields
+
+ >>> @define
+ ... class C:
+ ... x = field(metadata={'my_metadata': 1})
+ >>> fields(C).x.metadata
+ mappingproxy({'my_metadata': 1})
+ >>> fields(C).x.metadata['my_metadata']
+ 1
+
+Metadata is not used by ``attrs``, and is meant to enable rich functionality in third-party libraries.
+The metadata dictionary follows the normal dictionary rules: keys need to be hashable, and both keys and values are recommended to be immutable.
+
+If you're the author of a third-party library with ``attrs`` integration, please see `Extending Metadata <extending_metadata>`.
+
+
+Types
+-----
+
+``attrs`` also allows you to associate a type with an attribute using either the *type* argument to `attr.ib` or -- as of Python 3.6 -- using `PEP 526 <https://www.python.org/dev/peps/pep-0526/>`_-annotations:
+
+
+.. doctest::
+
+ >>> from attrs import fields
+
+ >>> @define
+ ... class C:
+ ... x: int
+ >>> fields(C).x.type
+ <class 'int'>
+
+ >>> import attr
+ >>> @attr.s
+ ... class C(object):
+ ... x = attr.ib(type=int)
+ >>> fields(C).x.type
+ <class 'int'>
+
+If you don't mind annotating *all* attributes, you can even drop the `attrs.field` and assign default values instead:
+
+.. doctest::
+
+ >>> import typing
+ >>> from attrs import fields
+
+ >>> @define
+ ... class AutoC:
+ ... cls_var: typing.ClassVar[int] = 5 # this one is ignored
+ ... l: list[int] = Factory(list)
+ ... x: int = 1
+ ... foo: str = "every attrib needs a type if auto_attribs=True"
+ ... bar: typing.Any = None
+ >>> fields(AutoC).l.type
+ list[int]
+ >>> fields(AutoC).x.type
+ <class 'int'>
+ >>> fields(AutoC).foo.type
+ <class 'str'>
+ >>> fields(AutoC).bar.type
+ typing.Any
+ >>> AutoC()
+ AutoC(l=[], x=1, foo='every attrib needs a type if auto_attribs=True', bar=None)
+ >>> AutoC.cls_var
+ 5
+
+The generated ``__init__`` method will have an attribute called ``__annotations__`` that contains this type information.
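+
+For example, a quick way to peek at them (the printed value is a rough sketch)::
+
+   @define
+   class C:
+       x: int
+
+   C.__init__.__annotations__
+   # {'x': <class 'int'>, 'return': None}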
+
+If your annotations contain strings (e.g. forward references),
+you can resolve these after all references have been defined by using :func:`attrs.resolve_types`.
+This will replace the *type* attribute in the respective fields.
+
+.. doctest::
+
+ >>> from attrs import fields, resolve_types
+
+ >>> @define
+ ... class A:
+ ... a: 'list[A]'
+ ... b: 'B'
+ ...
+ >>> @define
+ ... class B:
+ ... a: A
+ ...
+ >>> fields(A).a.type
+ 'list[A]'
+ >>> fields(A).b.type
+ 'B'
+ >>> resolve_types(A, globals(), locals())
+ <class 'A'>
+ >>> fields(A).a.type
+ list[A]
+ >>> fields(A).b.type
+ <class 'B'>
+
+.. note::
+
+ If you find yourself using string type annotations to handle forward references, wrap the entire type annotation in quotes instead of only the type you need a forward reference to (so ``'list[A]'`` instead of ``list['A']``).
+ This is a limitation of the Python typing system.
+
+.. warning::
+
+ ``attrs`` itself doesn't have any features that work on top of type metadata *yet*.
+ However it's useful for writing your own validators or serialization frameworks.
+
+
+Slots
+-----
+
+:term:`Slotted classes <slotted classes>` have several advantages on CPython.
+Defining ``__slots__`` by hand is tedious, in ``attrs`` it's just a matter of using `attrs.define` or passing ``slots=True`` to `attr.s`:
+
+.. doctest::
+
+ >>> import attr
+
+ >>> @attr.s(slots=True)
+ ... class Coordinates:
+ ... x: int
+ ... y: int
+
+
+Immutability
+------------
+
+Sometimes you have instances that shouldn't be changed after instantiation.
+Immutability is especially popular in functional programming and is generally a very good thing.
+If you'd like to enforce it, ``attrs`` will try to help:
+
+.. doctest::
+
+ >>> @frozen
+ ... class C:
+ ... x: int
+ >>> i = C(1)
+ >>> i.x = 2
+ Traceback (most recent call last):
+ ...
+ attr.exceptions.FrozenInstanceError: can't set attribute
+ >>> i.x
+ 1
+
+Please note that true immutability is impossible in Python but it will `get <how-frozen>` you 99% there.
+By themselves, immutable classes are useful for long-lived objects that should never change, like configurations for example.
+
+In order to use them in regular program flow, you'll need a way to easily create new instances with changed attributes.
+In Clojure that function is called `assoc <https://clojuredocs.org/clojure.core/assoc>`_ and ``attrs`` shamelessly imitates it: `attr.evolve`:
+
+.. doctest::
+
+ >>> from attrs import evolve
+
+ >>> @frozen
+ ... class C:
+ ... x: int
+ ... y: int
+ >>> i1 = C(1, 2)
+ >>> i1
+ C(x=1, y=2)
+ >>> i2 = evolve(i1, y=3)
+ >>> i2
+ C(x=1, y=3)
+ >>> i1 == i2
+ False
+
+
+Other Goodies
+-------------
+
+Sometimes you may want to create a class programmatically.
+``attrs`` won't let you down and gives you `attrs.make_class`:
+
+.. doctest::
+
+ >>> from attrs import fields, make_class
+ >>> @define
+ ... class C1:
+ ... x = field()
+ ... y = field()
+ >>> C2 = make_class("C2", ["x", "y"])
+ >>> fields(C1) == fields(C2)
+ True
+
+You still have full control over the attributes if you pass a dictionary of name-to-``field`` mappings, and you can pass arguments to ``@attr.s`` as well:
+
+.. doctest::
+
+ >>> from attrs import make_class
+
+ >>> C = make_class("C", {"x": field(default=42),
+ ... "y": field(default=Factory(list))},
+ ... repr=False)
+ >>> i = C()
+ >>> i # no repr added!
+ <__main__.C object at ...>
+ >>> i.x
+ 42
+ >>> i.y
+ []
+
+If you need to dynamically make a class with `attrs.make_class` and it needs to be a subclass of something other than ``object``, use the ``bases`` argument:
+
+.. doctest::
+
+ >>> from attrs import make_class
+
+ >>> class D:
+ ... def __eq__(self, other):
+ ... return True # arbitrary example
+ >>> C = make_class("C", {}, bases=(D,), cmp=False)
+ >>> isinstance(C(), D)
+ True
+
+Sometimes, you want to have your class's ``__init__`` method do more than just
+the initialization, validation, etc. that gets done for you automatically when
+using ``@define``.
+To do this, just define a ``__attrs_post_init__`` method in your class.
+It will get called at the end of the generated ``__init__`` method.
+
+.. doctest::
+
+ >>> @define
+ ... class C:
+ ... x: int
+ ... y: int
+ ... z: int = field(init=False)
+ ...
+ ... def __attrs_post_init__(self):
+ ... self.z = self.x + self.y
+ >>> obj = C(x=1, y=2)
+ >>> obj
+ C(x=1, y=2, z=3)
+
+You can exclude single attributes from certain methods:
+
+.. doctest::
+
+ >>> @define
+ ... class C:
+ ... user: str
+ ... password: str = field(repr=False)
+ >>> C("me", "s3kr3t")
+ C(user='me')
+
+Alternatively, to influence how the generated ``__repr__()`` method formats a specific attribute, specify a custom callable to be used instead of the ``repr()`` built-in function:
+
+.. doctest::
+
+ >>> @define
+ ... class C:
+ ... user: str
+ ... password: str = field(repr=lambda value: '***')
+ >>> C("me", "s3kr3t")
+ C(user='me', password=***)
diff --git a/testing/web-platform/tests/tools/third_party/attrs/docs/extending.rst b/testing/web-platform/tests/tools/third_party/attrs/docs/extending.rst
new file mode 100644
index 0000000000..faf71afd91
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/docs/extending.rst
@@ -0,0 +1,313 @@
+Extending
+=========
+
+Each ``attrs``-decorated class has a ``__attrs_attrs__`` class attribute.
+It's a tuple of `attrs.Attribute` carrying metadata about each attribute.
+
+So it is fairly simple to build your own decorators on top of ``attrs``:
+
+.. doctest::
+
+ >>> from attr import define
+ >>> def print_attrs(cls):
+ ... print(cls.__attrs_attrs__)
+ ... return cls
+ >>> @print_attrs
+ ... @define
+ ... class C:
+ ... a: int
+ (Attribute(name='a', default=NOTHING, validator=None, repr=True, eq=True, eq_key=None, order=True, order_key=None, hash=None, init=True, metadata=mappingproxy({}), type=<class 'int'>, converter=None, kw_only=False, inherited=False, on_setattr=None),)
+
+
+.. warning::
+
+ The `attrs.define`/`attr.s` decorator **must** be applied first because it puts ``__attrs_attrs__`` in place!
+ That means that it has to come *after* your decorator because::
+
+ @a
+ @b
+ def f():
+ pass
+
+ is just `syntactic sugar <https://en.wikipedia.org/wiki/Syntactic_sugar>`_ for::
+
+ def original_f():
+ pass
+
+ f = a(b(original_f))
+
+
+Wrapping the Decorator
+----------------------
+
+A more elegant way can be to wrap ``attrs`` altogether and build a class `DSL <https://en.wikipedia.org/wiki/Domain-specific_language>`_ on top of it.
+
+An example for that is the package `environ-config <https://github.com/hynek/environ-config>`_ that uses ``attrs`` under the hood to define environment-based configurations declaratively without exposing ``attrs`` APIs at all.
+
+Another common use case is to overwrite ``attrs``'s defaults.
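+
+A tiny, hypothetical sketch of what such a wrapper could look like (the wrapper name and the chosen defaults are made up for illustration)::
+
+ import attr
+
+ def my_define(cls):
+     # Project-wide defaults: frozen instances and keyword-only __init__.
+     return attr.define(cls, frozen=True, kw_only=True)
+
+ @my_define
+ class Config:
+     host: str
+     port: int = 8080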
+
+Mypy
+^^^^
+
+Unfortunately, decorator wrapping currently `confuses <https://github.com/python/mypy/issues/5406>`_ mypy's ``attrs`` plugin.
+At the moment, the best workaround is to hold your nose, write a fake mypy plugin, and mutate a bunch of global variables::
+
+ from mypy.plugin import Plugin
+ from mypy.plugins.attrs import (
+ attr_attrib_makers,
+ attr_class_makers,
+ attr_dataclass_makers,
+ )
+
+ # These work just like `attr.dataclass`.
+ attr_dataclass_makers.add("my_module.method_looks_like_attr_dataclass")
+
+ # This works just like `attr.s`.
+ attr_class_makers.add("my_module.method_looks_like_attr_s")
+
+ # These are our `attr.ib` makers.
+ attr_attrib_makers.add("my_module.method_looks_like_attrib")
+
+ class MyPlugin(Plugin):
+ # Our plugin does nothing but it has to exist so this file gets loaded.
+ pass
+
+
+ def plugin(version):
+ return MyPlugin
+
+
+Then tell mypy about your plugin using your project's ``mypy.ini``:
+
+.. code:: ini
+
+ [mypy]
+ plugins=<path to file>
+
+
+.. warning::
+ Please note that it is currently *impossible* to let mypy know that you've changed defaults like *eq* or *order*.
+ You can only use this trick to tell mypy that a class is actually an ``attrs`` class.
+
+Pyright
+^^^^^^^
+
+Generic decorator wrapping is supported in `pyright <https://github.com/microsoft/pyright>`_ via their dataclass_transform_ specification.
+
+For a custom wrapping of the form::
+
+ def custom_define(f):
+ return attr.define(f)
+
+This is implemented via a ``__dataclass_transform__`` type decorator in the custom extension's ``.pyi`` of the form::
+
+ def __dataclass_transform__(
+ *,
+ eq_default: bool = True,
+ order_default: bool = False,
+ kw_only_default: bool = False,
+ field_descriptors: Tuple[Union[type, Callable[..., Any]], ...] = (()),
+ ) -> Callable[[_T], _T]: ...
+
+ @__dataclass_transform__(field_descriptors=(attr.attrib, attr.field))
+ def custom_define(f): ...
+
+.. warning::
+
+ ``dataclass_transform`` is supported **provisionally** as of ``pyright`` 1.1.135.
+
+ Both the ``pyright`` dataclass_transform_ specification and ``attrs`` implementation may change in future versions.
+
+
+Types
+-----
+
+``attrs`` offers two ways of attaching type information to attributes:
+
+- `PEP 526 <https://www.python.org/dev/peps/pep-0526/>`_ annotations on Python 3.6 and later,
+- and the *type* argument to `attr.ib`.
+
+This information is available to you:
+
+.. doctest::
+
+ >>> from attr import attrib, define, field, fields
+ >>> @define
+ ... class C:
+ ... x: int = field()
+ ... y = attrib(type=str)
+ >>> fields(C).x.type
+ <class 'int'>
+ >>> fields(C).y.type
+ <class 'str'>
+
+Currently, ``attrs`` doesn't do anything with this information but it's very useful if you'd like to write your own validators or serializers!
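+
+As a minimal, hypothetical sketch of what such a tool could look like (the ``check_types`` helper below is made up for illustration and only handles plain classes as types)::
+
+ from attr import fields
+
+ def check_types(inst):
+     # Walk the stored type metadata and verify each value matches it.
+     for a in fields(type(inst)):
+         if isinstance(a.type, type) and not isinstance(getattr(inst, a.name), a.type):
+             raise TypeError(f"'{a.name}' must be {a.type!r}")
+
+ check_types(C(x=1, y="hello"))  # passes silently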
+
+
+.. _extending_metadata:
+
+Metadata
+--------
+
+If you're the author of a third-party library with ``attrs`` integration, you may want to take advantage of attribute metadata.
+
+Here are some tips for effective use of metadata:
+
+- Try making your metadata keys and values immutable.
+ This keeps the entire ``Attribute`` instances immutable too.
+
+- To avoid metadata key collisions, consider exposing your metadata keys from your modules::
+
+ from mylib import MY_METADATA_KEY
+
+ @define
+ class C:
+ x = field(metadata={MY_METADATA_KEY: 1})
+
+ Metadata should be composable, so consider supporting this approach even if you decide to implement your metadata in one of the following ways.
+
+- Expose ``field`` wrappers for your specific metadata.
+ This is a more graceful approach if your users don't require metadata from other libraries.
+
+ .. doctest::
+
+ >>> from attr import fields, NOTHING
+ >>> MY_TYPE_METADATA = '__my_type_metadata'
+ >>>
+ >>> def typed(
+ ... cls, default=NOTHING, validator=None, repr=True,
+ ... eq=True, order=None, hash=None, init=True, metadata={},
+ ... converter=None
+ ... ):
+ ... metadata = dict() if not metadata else metadata
+ ... metadata[MY_TYPE_METADATA] = cls
+ ... return field(
+ ... default=default, validator=validator, repr=repr,
+ ... eq=eq, order=order, hash=hash, init=init,
+ ... metadata=metadata, converter=converter
+ ... )
+ >>>
+ >>> @define
+ ... class C:
+ ... x: int = typed(int, default=1, init=False)
+ >>> fields(C).x.metadata[MY_TYPE_METADATA]
+ <class 'int'>
+
+
+.. _transform-fields:
+
+Automatic Field Transformation and Modification
+-----------------------------------------------
+
+``attrs`` allows you to automatically modify or transform the class' fields while the class is being created.
+You do this by passing a *field_transformer* hook to `attr.define` (and its friends).
+Its main purpose is to automatically add converters to attributes based on their type to aid the development of API clients and other typed data loaders.
+
+This hook must have the following signature:
+
+.. function:: your_hook(cls: type, fields: list[attrs.Attribute]) -> list[attrs.Attribute]
+ :noindex:
+
+- *cls* is your class right *before* it is being converted into an attrs class.
+ This means it does not yet have the ``__attrs_attrs__`` attribute.
+
+- *fields* is a list of all `attrs.Attribute` instances that will later be set to ``__attrs_attrs__``.
+ You can modify these attributes any way you want:
+ You can add converters, change types, and even remove attributes completely or create new ones!
+
+For example, let's assume that you really don't like floats:
+
+.. doctest::
+
+ >>> def drop_floats(cls, fields):
+ ... return [f for f in fields if f.type not in {float, 'float'}]
+ ...
+ >>> @frozen(field_transformer=drop_floats)
+ ... class Data:
+ ... a: int
+ ... b: float
+ ... c: str
+ ...
+ >>> Data(42, "spam")
+ Data(a=42, c='spam')
+
+A more realistic example would be to automatically convert data that you, e.g., load from JSON:
+
+.. doctest::
+
+ >>> from datetime import datetime
+ >>>
+ >>> def auto_convert(cls, fields):
+ ... results = []
+ ... for field in fields:
+ ... if field.converter is not None:
+ ... results.append(field)
+ ... continue
+ ... if field.type in {datetime, 'datetime'}:
+ ... converter = (lambda d: datetime.fromisoformat(d) if isinstance(d, str) else d)
+ ... else:
+ ... converter = None
+ ... results.append(field.evolve(converter=converter))
+ ... return results
+ ...
+ >>> @frozen(field_transformer=auto_convert)
+ ... class Data:
+ ... a: int
+ ... b: str
+ ... c: datetime
+ ...
+ >>> from_json = {"a": 3, "b": "spam", "c": "2020-05-04T13:37:00"}
+ >>> Data(**from_json)
+ Data(a=3, b='spam', c=datetime.datetime(2020, 5, 4, 13, 37))
+
+
+Customize Value Serialization in ``asdict()``
+---------------------------------------------
+
+``attrs`` allows you to serialize instances of ``attrs`` classes to dicts using the `attrs.asdict` function.
+However, the result cannot always be serialized, since most data types will remain as they are:
+
+.. doctest::
+
+ >>> import json
+ >>> import datetime
+ >>> from attrs import asdict
+ >>>
+ >>> @frozen
+ ... class Data:
+ ... dt: datetime.datetime
+ ...
+ >>> data = asdict(Data(datetime.datetime(2020, 5, 4, 13, 37)))
+ >>> data
+ {'dt': datetime.datetime(2020, 5, 4, 13, 37)}
+ >>> json.dumps(data)
+ Traceback (most recent call last):
+ ...
+ TypeError: Object of type datetime is not JSON serializable
+
+To help you with this, `attr.asdict` allows you to pass a *value_serializer* hook.
+It has the signature
+
+.. function:: your_hook(inst: type, field: attrs.Attribute, value: typing.Any) -> typing.Any
+ :noindex:
+
+.. doctest::
+
+ >>> from attr import asdict
+ >>> def serialize(inst, field, value):
+ ... if isinstance(value, datetime.datetime):
+ ... return value.isoformat()
+ ... return value
+ ...
+ >>> data = asdict(
+ ... Data(datetime.datetime(2020, 5, 4, 13, 37)),
+ ... value_serializer=serialize,
+ ... )
+ >>> data
+ {'dt': '2020-05-04T13:37:00'}
+ >>> json.dumps(data)
+ '{"dt": "2020-05-04T13:37:00"}'
+
+*****
+
+.. _dataclass_transform: https://github.com/microsoft/pyright/blob/master/specs/dataclass_transforms.md
diff --git a/testing/web-platform/tests/tools/third_party/attrs/docs/glossary.rst b/testing/web-platform/tests/tools/third_party/attrs/docs/glossary.rst
new file mode 100644
index 0000000000..5fd01f4fb1
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/docs/glossary.rst
@@ -0,0 +1,104 @@
+Glossary
+========
+
+.. glossary::
+
+ dunder methods
+ "Dunder" is a contraction of "double underscore".
+
+ It's methods like ``__init__`` or ``__eq__`` that are sometimes also called *magic methods* or it's said that they implement an *object protocol*.
+
+ In spoken form, you'd call ``__init__`` just "dunder init".
+
+ Its first documented use is a `mailing list posting <https://mail.python.org/pipermail/python-list/2002-September/155836.html>`_ by Mark Jackson from 2002.
+
+ dict classes
+ A regular class whose attributes are stored in the `object.__dict__` attribute of every single instance.
+ This is quite wasteful especially for objects with very few data attributes and the space consumption can become significant when creating large numbers of instances.
+
+ This is the type of class you get by default both with and without ``attrs`` (except with the next-generation APIs `attr.define`, `attr.mutable`, and `attr.frozen`).
+
+ slotted classes
+ A class whose instances have no `object.__dict__` attribute and `define <https://docs.python.org/3/reference/datamodel.html#slots>`_ their attributes in a `object.__slots__` attribute instead.
+ In ``attrs``, they are created by passing ``slots=True`` to ``@attr.s`` (and are on by default in `attr.define`/`attr.mutable`/`attr.frozen`).
+
+
+ Their main advantage is that they use less memory on CPython [#pypy]_ and are slightly faster.
+
+ However they also come with several possibly surprising gotchas:
+
+ - Slotted classes don't allow any attributes to be set other than those defined in the ``__slots__`` of one of the classes in the hierarchy:
+
+ .. doctest::
+
+ >>> from attr import define
+ >>> @define
+ ... class Coordinates:
+ ... x: int
+ ... y: int
+ ...
+ >>> c = Coordinates(x=1, y=2)
+ >>> c.z = 3
+ Traceback (most recent call last):
+ ...
+ AttributeError: 'Coordinates' object has no attribute 'z'
+
+ - Slotted classes can inherit from other classes just like non-slotted classes, but some of the benefits of slotted classes are lost if you do that.
+ If you must inherit from other classes, try to inherit only from other slotted classes.
+
+ - However, `it's not possible <https://docs.python.org/3/reference/datamodel.html#notes-on-using-slots>`_ to inherit from more than one class that has attributes in ``__slots__`` (you will get a ``TypeError: multiple bases have instance lay-out conflict``).
+
+ - It's not possible to monkeypatch methods on slotted classes.
+ This can feel limiting in test code; however, the need to monkeypatch your own classes is usually a design smell.
+
+ If you really need to monkeypatch an instance in your tests, but don't want to give up on the advantages of slotted classes in production code, you can always subclass a slotted class as a dict class with no further changes and all the limitations go away:
+
+ .. doctest::
+
+ >>> import attr, unittest.mock
+ >>> @define
+ ... class Slotted:
+ ... x: int
+ ...
+ ... def method(self):
+ ... return self.x
+ >>> s = Slotted(42)
+ >>> s.method()
+ 42
+ >>> with unittest.mock.patch.object(s, "method", return_value=23):
+ ... pass
+ Traceback (most recent call last):
+ ...
+ AttributeError: 'Slotted' object attribute 'method' is read-only
+ >>> @define(slots=False)
+ ... class Dicted(Slotted):
+ ... pass
+ >>> d = Dicted(42)
+ >>> d.method()
+ 42
+ >>> with unittest.mock.patch.object(d, "method", return_value=23):
+ ... assert 23 == d.method()
+
+ - Slotted classes must implement :meth:`__getstate__ <object.__getstate__>` and :meth:`__setstate__ <object.__setstate__>` to be serializable with `pickle` protocol 0 and 1.
+ Therefore, ``attrs`` creates these methods automatically for ``slots=True`` classes (Python 2 uses protocol 0 by default).
+
+ .. note::
+
+ If the ``@attr.s(slots=True)`` decorated class already implements the :meth:`__getstate__ <object.__getstate__>` and :meth:`__setstate__ <object.__setstate__>` methods, they will be *overwritten* by ``attrs``'s autogenerated implementation by default.
+
+ This can be avoided by setting ``@attr.s(getstate_setstate=False)`` or by setting ``@attr.s(auto_detect=True)``.
+
+ Also, `think twice <https://www.youtube.com/watch?v=7KnfGDajDQw>`_ before using `pickle`.
+
+ - Slotted classes are weak-referenceable by default.
+ This can be disabled in CPython by passing ``weakref_slot=False`` to ``@attr.s`` [#pypyweakref]_.
+
+ - Since it's currently impossible to make a class slotted after it's been created, ``attrs`` has to replace your class with a new one.
+ While it tries to do that as gracefully as possible, certain metaclass features like `object.__init_subclass__` do not work with slotted classes.
+
+ - The `class.__subclasses__` attribute needs a garbage collection run (which can be manually triggered using `gc.collect`) for the original class to be removed.
+ See issue `#407 <https://github.com/python-attrs/attrs/issues/407>`_ for more details.
+
+
+.. [#pypy] On PyPy, there is no memory advantage in using slotted classes.
+.. [#pypyweakref] On PyPy, slotted classes are naturally weak-referenceable so ``weakref_slot=False`` has no effect.
diff --git a/testing/web-platform/tests/tools/third_party/attrs/docs/hashing.rst b/testing/web-platform/tests/tools/third_party/attrs/docs/hashing.rst
new file mode 100644
index 0000000000..30888f97bb
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/docs/hashing.rst
@@ -0,0 +1,86 @@
+Hashing
+=======
+
+Hash Method Generation
+----------------------
+
+.. warning::
+
+ The overarching theme is to never set the ``@attr.s(hash=X)`` parameter yourself.
+ Leave it at ``None`` which means that ``attrs`` will do the right thing for you, depending on the other parameters:
+
+ - If you want to make objects hashable by value: use ``@attr.s(frozen=True)``.
+ - If you want hashing and equality by object identity: use ``@attr.s(eq=False)``
+
+ Setting ``hash`` yourself can have unexpected consequences so we recommend to tinker with it only if you know exactly what you're doing.
+
+Under certain circumstances, it's necessary for objects to be *hashable*.
+For example if you want to put them into a `set` or if you want to use them as keys in a `dict`.
+
+The *hash* of an object is an integer that represents the contents of an object.
+It can be obtained by calling `hash` on an object and is implemented by writing a ``__hash__`` method for your class.
+
+``attrs`` will happily write a ``__hash__`` method for you [#fn1]_; however, it will *not* do so by default.
+That's because, according to the definition_ from the official Python docs, the returned hash has to fulfill certain constraints:
+
+#. Two objects that are equal, **must** have the same hash.
+ This means that if ``x == y``, it *must* follow that ``hash(x) == hash(y)``.
+
+ By default, Python classes are compared *and* hashed by their `id`.
+ That means that every instance of a class has a different hash, no matter what attributes it carries.
+
+ It follows that the moment you (or ``attrs``) change the way equality is handled by implementing ``__eq__`` which is based on attribute values, this constraint is broken.
+ For that reason Python 3 will make a class that has customized equality unhashable.
+ Python 2 on the other hand will happily let you shoot your foot off.
+ Unfortunately ``attrs`` currently mimics Python 2's behavior for backward compatibility reasons if you set ``hash=False``.
+
+ The *correct way* to achieve hashing by id is to set ``@attr.s(eq=False)``.
+ Setting ``@attr.s(hash=False)`` (which implies ``eq=True``) is almost certainly a *bug*.
+
+ .. warning::
+
+ Be careful when subclassing!
+ Setting ``eq=False`` on a class whose base class has a non-default ``__hash__`` method will *not* make ``attrs`` remove that ``__hash__`` for you.
+
+ It is part of ``attrs``'s philosophy to only *add* to classes so you have the freedom to customize your classes as you wish.
+ So if you want to *get rid* of methods, you'll have to do it by hand.
+
+ The easiest way to reset ``__hash__`` on a class is adding ``__hash__ = object.__hash__`` in the class body.
+
+#. If two objects are not equal, their hash **should** be different.
+
+ While this isn't a requirement from a standpoint of correctness, sets and dicts become less effective if there are a lot of identical hashes.
+ The worst case is when all objects have the same hash which turns a set into a list.
+
+#. The hash of an object **must not** change.
+
+ If you create a class with ``@attr.s(frozen=True)``, this is fulfilled by definition; therefore ``attrs`` will write a ``__hash__`` function for you automatically.
+ You can also force it to write one with ``hash=True`` but then it's *your* responsibility to make sure that the object is not mutated.
+
+ This point is the reason why mutable structures like lists, dictionaries, or sets aren't hashable while immutable ones like tuples or frozensets are:
+ point 1 and 2 require that the hash changes with the contents but point 3 forbids it.
+
+For a more thorough explanation of this topic, please refer to this blog post: `Python Hashes and Equality`_.
+
+
+Hashing and Mutability
+----------------------
+
+Changing any field involved in hash code computation after the first call to ``__hash__`` (typically this would be after its insertion into a hash-based collection) can result in silent bugs.
+Therefore, it is strongly recommended that hashable classes be ``frozen``.
+Beware, however, that this is not a complete guarantee of safety:
+if a field points to an object and that object is mutated, the hash code may change, but ``frozen`` will not protect you.
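+
+A small sketch of the kind of silent bug this section warns about (using a deliberately mutable yet hashable class, which is exactly what the advice above tells you to avoid)::
+
+ import attr
+
+ @attr.s(hash=True)  # discouraged: hashable but mutable
+ class Key(object):
+     name = attr.ib()
+
+ k = Key("a")
+ seen = {k}
+ k.name = "b"       # the hash of k changes, but the set still files it under the old one
+ print(k in seen)   # False -- the object silently "disappears" from the set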
+
+
+Hash Code Caching
+-----------------
+
+Some objects have hash codes which are expensive to compute.
+If such objects are to be stored in hash-based collections, it can be useful to compute the hash codes only once and then store the result on the object to make future hash code requests fast.
+To enable caching of hash codes, pass ``cache_hash=True`` to ``@attr.s``.
+This may only be done if ``attrs`` is already generating a hash function for the object.
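+
+For example (a minimal sketch; the field and the sizes are made up)::
+
+ import attr
+
+ @attr.s(frozen=True, cache_hash=True)
+ class Document(object):
+     text = attr.ib()  # potentially a very long string
+
+ d = Document("lorem ipsum " * 100_000)
+ hash(d)  # computed once and stored on the instance
+ hash(d)  # subsequent calls reuse the cached value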
+
+.. [#fn1] The hash is computed by hashing a tuple that consists of a unique id for the class plus all attribute values.
+
+.. _definition: https://docs.python.org/3/glossary.html#term-hashable
+.. _`Python Hashes and Equality`: https://hynek.me/articles/hashes-and-equality/
diff --git a/testing/web-platform/tests/tools/third_party/attrs/docs/how-does-it-work.rst b/testing/web-platform/tests/tools/third_party/attrs/docs/how-does-it-work.rst
new file mode 100644
index 0000000000..f899740542
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/docs/how-does-it-work.rst
@@ -0,0 +1,109 @@
+.. _how:
+
+How Does It Work?
+=================
+
+
+Boilerplate
+-----------
+
+``attrs`` certainly isn't the first library that aims to simplify class definition in Python.
+But its **declarative** approach combined with **no runtime overhead** lets it stand out.
+
+Once you apply the ``@attrs.define`` (or ``@attr.s``) decorator to a class, ``attrs`` searches the class object for instances of ``attr.ib``\ s.
+Internally they're a representation of the data passed into ``attr.ib`` along with a counter to preserve the order of the attributes.
+Alternatively, it's possible to define them using :doc:`types`.
+
+In order to ensure that subclassing works as you'd expect it to work, ``attrs`` also walks the class hierarchy and collects the attributes of all base classes.
+Please note that ``attrs`` does *not* call ``super()`` *ever*.
+It will write :term:`dunder methods` to work on *all* of those attributes which also has performance benefits due to fewer function calls.
+
+Once ``attrs`` knows what attributes it has to work on, it writes the requested :term:`dunder methods` and -- depending on whether you wish to have a :term:`dict <dict classes>` or :term:`slotted <slotted classes>` class -- creates a new class for you (``slots=True``) or attaches them to the original class (``slots=False``).
+While creating new classes is more elegant, we've run into several edge cases surrounding metaclasses that make it impossible to go this route unconditionally.
+
+To be very clear: if you define a class with a single attribute without a default value, the generated ``__init__`` will look *exactly* how you'd expect:
+
+.. doctest::
+
+ >>> import inspect
+ >>> from attr import define
+ >>> @define
+ ... class C:
+ ... x: int
+ >>> print(inspect.getsource(C.__init__))
+ def __init__(self, x):
+ self.x = x
+ <BLANKLINE>
+
+No magic, no meta programming, no expensive introspection at runtime.
+
+****
+
+Everything until this point happens exactly *once* when the class is defined.
+As soon as a class is done, it's done.
+And it's just a regular Python class like any other, except for a single ``__attrs_attrs__`` attribute that ``attrs`` uses internally.
+Much of the information is accessible via `attrs.fields` and other functions which can be used for introspection or for writing your own tools and decorators on top of ``attrs`` (like `attrs.asdict`).
+
+And once you start instantiating your classes, ``attrs`` is out of your way completely.
+
+This **static** approach was very much a design goal of ``attrs`` and what I strongly believe makes it distinct.
+
+
+.. _how-frozen:
+
+Immutability
+------------
+
+In order to give you immutability, ``attrs`` will attach a ``__setattr__`` method to your class that raises an `attrs.exceptions.FrozenInstanceError` whenever anyone tries to set an attribute.
+
+The same is true if you choose to freeze individual attributes using the `attrs.setters.frozen` *on_setattr* hook -- except that the exception becomes `attrs.exceptions.FrozenAttributeError`.
+
+Both errors subclass `attrs.exceptions.FrozenError`.
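+
+Conceptually -- as a simplified sketch rather than the literal implementation -- the attached method does little more than this::
+
+ from attr.exceptions import FrozenInstanceError
+
+ def _frozen_setattrs(self, name, value):
+     # Reject every attribute assignment after initialization.
+     raise FrozenInstanceError()
+
+ # attrs then installs it on the class, roughly like:
+ # cls.__setattr__ = _frozen_setattrs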
+
+-----
+
+Depending on whether a class is a dict class or a slotted class, ``attrs`` uses a different technique to circumvent that limitation in the ``__init__`` method.
+
+Once constructed, frozen instances don't differ in any way from regular ones except that you cannot change their attributes.
+
+
+Dict Classes
+++++++++++++
+
+Dict classes -- i.e. regular classes -- simply assign the value directly into the instance's ``__dict__`` (and there's nothing we can do to stop the user from doing the same).
+
+The performance impact is negligible.
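+
+Roughly -- again a simplified sketch, not the code ``attrs`` actually generates -- the ``__init__`` of a frozen dict class does something like this::
+
+ def __init__(self, x):
+     # Write straight into the instance dict, bypassing the raising __setattr__.
+     self.__dict__['x'] = x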
+
+
+Slotted Classes
++++++++++++++++
+
+Slotted classes are more complicated.
+Here it uses (an aggressively cached) :meth:`object.__setattr__` to set your attributes.
+This is (still) slower than a plain assignment:
+
+.. code-block:: none
+
+ $ pyperf timeit --rigorous \
+ -s "import attr; C = attr.make_class('C', ['x', 'y', 'z'], slots=True)" \
+ "C(1, 2, 3)"
+ ........................................
+ Median +- std dev: 378 ns +- 12 ns
+
+ $ pyperf timeit --rigorous \
+ -s "import attr; C = attr.make_class('C', ['x', 'y', 'z'], slots=True, frozen=True)" \
+ "C(1, 2, 3)"
+ ........................................
+ Median +- std dev: 676 ns +- 16 ns
+
+So on a laptop computer the difference is about 300 nanoseconds (1 second is 1,000,000,000 nanoseconds).
+It's certainly something you'll feel in a hot loop but shouldn't matter in normal code.
+Pick what's more important to you.
+
+
+Summary
++++++++
+
+You should avoid instantiating lots of frozen slotted classes (i.e. ``@frozen``) in performance-critical code.
+
+Frozen dict classes have hardly any performance impact; unfrozen slotted classes are even *faster* than unfrozen dict classes (i.e. regular classes).
diff --git a/testing/web-platform/tests/tools/third_party/attrs/docs/index.rst b/testing/web-platform/tests/tools/third_party/attrs/docs/index.rst
new file mode 100644
index 0000000000..ff65a6738c
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/docs/index.rst
@@ -0,0 +1,100 @@
+.. module:: attr
+.. module:: attrs
+
+======================================
+``attrs``: Classes Without Boilerplate
+======================================
+
+Release v\ |release| (`What's new? <changelog>`).
+
+.. include:: ../README.rst
+ :start-after: teaser-begin
+ :end-before: teaser-end
+
+
+Getting Started
+===============
+
+``attrs`` is a Python-only package `hosted on PyPI <https://pypi.org/project/attrs/>`_.
+The recommended installation method is `pip <https://pip.pypa.io/en/stable/>`_-installing into a `virtualenv <https://hynek.me/articles/virtualenv-lives/>`_:
+
+.. code-block:: console
+
+ $ python -m pip install attrs
+
+The next three steps should bring you up and running in no time:
+
+- `overview` will show you a simple example of ``attrs`` in action and introduce you to its philosophy.
+ Afterwards, you can start writing your own classes and understand what drives ``attrs``'s design.
+- `examples` will give you a comprehensive tour of ``attrs``'s features.
+ After reading, you will know about our advanced features and how to use them.
+- If you're confused by all the ``attr.s``, ``attr.ib``, ``attrs``, ``attrib``, ``define``, ``frozen``, and ``field``, head over to `names` for a very short explanation, and optionally a quick history lesson.
+- Finally `why` gives you a rundown of potential alternatives and why we think ``attrs`` is superior.
+ Yes, we've heard about ``namedtuple``\ s and Data Classes!
+- If at any point you get confused by some terminology, please check out our `glossary`.
+
+
+If you need any help while getting started, feel free to use the ``python-attrs`` tag on `Stack Overflow <https://stackoverflow.com/questions/tagged/python-attrs>`_ and someone will surely help you out!
+
+
+Day-to-Day Usage
+================
+
+- `types` help you to write *correct* and *self-documenting* code.
+ ``attrs`` has first class support for them, yet keeps them optional if you’re not convinced!
+- Instance initialization is one of ``attrs``'s key feature areas.
+ Our goal is to relieve you from writing as much code as possible.
+ `init` gives you an overview what ``attrs`` has to offer and explains some related philosophies we believe in.
+- Comparing and ordering objects is a common task.
+ `comparison` shows you how ``attrs`` helps you with that and how you can customize it.
+- If you want to put objects into sets or use them as keys in dictionaries, they have to be hashable.
+ The simplest way to do that is to use frozen classes, but the topic is more complex than it seems and `hashing` will give you a primer on what to look out for.
+- Once you're comfortable with the concepts, our `api` contains all information you need to use ``attrs`` to its fullest.
+- ``attrs`` is built for extension from the ground up.
+ `extending` will show you the affordances it offers and how to make it a building block of your own projects.
+
+
+.. include:: ../README.rst
+ :start-after: -getting-help-
+ :end-before: -project-information-
+
+
+----
+
+
+Full Table of Contents
+======================
+
+.. toctree::
+ :maxdepth: 2
+
+ overview
+ why
+ examples
+ types
+ init
+ comparison
+ hashing
+ api
+ extending
+ how-does-it-work
+ names
+ glossary
+
+
+.. include:: ../README.rst
+ :start-after: -project-information-
+
+.. toctree::
+ :maxdepth: 1
+
+ license
+ python-2
+ changelog
+
+
+Indices and tables
+==================
+
+* `genindex`
+* `search`
diff --git a/testing/web-platform/tests/tools/third_party/attrs/docs/init.rst b/testing/web-platform/tests/tools/third_party/attrs/docs/init.rst
new file mode 100644
index 0000000000..fb276ded8a
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/docs/init.rst
@@ -0,0 +1,489 @@
+Initialization
+==============
+
+In Python, instance initialization happens in the ``__init__`` method.
+Generally speaking, you should keep as little logic as possible in it, and you should think about what the class needs and not how it is going to be instantiated.
+
+Passing complex objects into ``__init__`` and then using them to derive data for the class unnecessarily couples your new class with the old class which makes it harder to test and also will cause problems later.
+
+So assuming you use an ORM and want to extract 2D points from a row object, do not write code like this::
+
+ class Point(object):
+ def __init__(self, database_row):
+ self.x = database_row.x
+ self.y = database_row.y
+
+ pt = Point(row)
+
+Instead, write a `classmethod` that will extract it for you::
+
+ @define
+ class Point:
+ x: float
+ y: float
+
+ @classmethod
+ def from_row(cls, row):
+ return cls(row.x, row.y)
+
+ pt = Point.from_row(row)
+
+Now you can instantiate ``Point``\ s without creating fake row objects in your tests and you can have as many smart creation helpers as you want, in case more data sources appear.
+
+For similar reasons, we strongly discourage patterns like::
+
+ pt = Point(**row.attributes)
+
+which couples your classes to the database data model.
+Try to design your classes in a way that is clean and convenient to use -- not based on your database format.
+The database format can change anytime and you're stuck with a bad class design that is hard to change.
+Embrace functions and classmethods as a filter between reality and what's best for you to work with.
+
+If you look for object serialization, there's a bunch of projects listed on our ``attrs`` extensions `Wiki page`_.
+Some of them even support nested schemas.
+
+
+Private Attributes
+------------------
+
+One thing people tend to find confusing is the treatment of private attributes that start with an underscore.
+``attrs`` follows the doctrine that `there is no such thing as a private argument`_ and strips the underscores from the name when writing the ``__init__`` method signature:
+
+.. doctest::
+
+ >>> import inspect, attr, attrs
+ >>> from attr import define
+ >>> @define
+ ... class C:
+ ... _x: int
+ >>> inspect.signature(C.__init__)
+ <Signature (self, x: int) -> None>
+
+There really isn't a right or wrong; it's a matter of taste.
+But it's important to be aware of it because it can lead to surprising syntax errors:
+
+.. doctest::
+
+ >>> @define
+ ... class C:
+ ... _1: int
+ Traceback (most recent call last):
+ ...
+ SyntaxError: invalid syntax
+
+In this case a valid attribute name ``_1`` got transformed into an invalid argument name ``1``.
+
+
+Defaults
+--------
+
+Sometimes you don't want to pass all attribute values to a class.
+And sometimes, certain attributes aren't even intended to be passed, but you want to allow for customization anyway for easier testing.
+
+This is when default values come into play:
+
+.. doctest::
+
+ >>> from attr import define, field, Factory
+
+ >>> @define
+ ... class C:
+ ... a: int = 42
+ ... b: list = field(factory=list)
+ ... c: list = Factory(list) # syntactic sugar for above
+ ... d: dict = field()
+ ... @d.default
+ ... def _any_name_except_a_name_of_an_attribute(self):
+ ... return {}
+ >>> C()
+ C(a=42, b=[], c=[], d={})
+
+It's important that the decorated method -- or any other method or property! -- doesn't have the same name as the attribute, otherwise it would overwrite the attribute definition.
+
+Please note that as with function and method signatures, ``default=[]`` will *not* do what you may think it might do:
+
+.. doctest::
+
+ >>> @define
+ ... class C:
+ ... x = []
+ >>> i = C()
+ >>> k = C()
+ >>> i.x.append(42)
+ >>> k.x
+ [42]
+
+
+This is why ``attrs`` comes with factory options.
+
+.. warning::
+
+ Please note that the decorator-based defaults have one gotcha:
+ they are executed when the attribute is set, which means that, depending on the order of attributes, the ``self`` object may not be fully initialized when they're called.
+
+ Therefore you should use ``self`` as little as possible.
+
+ Even the smartest of us can `get confused`_ by what happens if you pass partially initialized objects around.
+
+
+.. _validators:
+
+Validators
+----------
+
+Another thing that definitely *does* belong in ``__init__`` is checking the resulting instance for invariants.
+This is why ``attrs`` has the concept of validators.
+
+
+Decorator
+~~~~~~~~~
+
+The most straightforward way is using the attribute's ``validator`` method as a decorator.
+
+The method has to accept three arguments:
+
+#. the *instance* that's being validated (aka ``self``),
+#. the *attribute* that it's validating, and finally
+#. the *value* that is passed for it.
+
+If the value does not pass the validator's standards, it just raises an appropriate exception.
+
+ >>> @define
+ ... class C:
+ ... x: int = field()
+ ... @x.validator
+ ... def _check_x(self, attribute, value):
+ ... if value > 42:
+ ... raise ValueError("x must be smaller or equal to 42")
+ >>> C(42)
+ C(x=42)
+ >>> C(43)
+ Traceback (most recent call last):
+ ...
+ ValueError: x must be smaller or equal to 42
+
+Again, it's important that the decorated method doesn't have the same name as the attribute and that the `attrs.field()` helper is used.
+
+
+Callables
+~~~~~~~~~
+
+If you want to re-use your validators, you should have a look at the ``validator`` argument to `attrs.field`.
+
+It takes either a callable or a list of callables (usually functions) and treats them as validators that receive the same arguments as with the decorator approach.
+
+Since the validators run *after* the instance is initialized, you can refer to other attributes while validating:
+
+.. doctest::
+
+ >>> def x_smaller_than_y(instance, attribute, value):
+ ... if value >= instance.y:
+ ... raise ValueError("'x' has to be smaller than 'y'!")
+ >>> @define
+ ... class C:
+ ... x = field(validator=[attrs.validators.instance_of(int),
+ ... x_smaller_than_y])
+ ... y = field()
+ >>> C(x=3, y=4)
+ C(x=3, y=4)
+ >>> C(x=4, y=3)
+ Traceback (most recent call last):
+ ...
+ ValueError: 'x' has to be smaller than 'y'!
+
+This example also shows off some syntactic sugar for using the `attrs.validators.and_` validator: if you pass a list, all validators have to pass.
+
+``attrs`` won't intercept your changes to those attributes, but you can always call `attrs.validate` on any instance to verify that it's still valid.
+When using `attrs.define` or `attrs.frozen`, ``attrs`` will run the validators even when setting the attribute:
+
+.. doctest::
+
+ >>> i = C(4, 5)
+ >>> i.x = 5
+ Traceback (most recent call last):
+ ...
+ ValueError: 'x' has to be smaller than 'y'!
+
+``attrs`` ships with a bunch of validators, make sure to `check them out <api_validators>` before writing your own:
+
+.. doctest::
+
+ >>> @define
+ ... class C:
+ ... x = field(validator=attrs.validators.instance_of(int))
+ >>> C(42)
+ C(x=42)
+ >>> C("42")
+ Traceback (most recent call last):
+ ...
+ TypeError: ("'x' must be <type 'int'> (got '42' that is a <type 'str'>).", Attribute(name='x', default=NOTHING, factory=NOTHING, validator=<instance_of validator for type <type 'int'>>, type=None), <type 'int'>, '42')
+
+Of course you can mix and match the two approaches at your convenience.
+If you define validators both ways for an attribute, both are run:
+
+.. doctest::
+
+ >>> @define
+ ... class C:
+ ... x = field(validator=attrs.validators.instance_of(int))
+ ... @x.validator
+ ... def fits_byte(self, attribute, value):
+ ... if not 0 <= value < 256:
+ ... raise ValueError("value out of bounds")
+ >>> C(128)
+ C(x=128)
+ >>> C("128")
+ Traceback (most recent call last):
+ ...
+ TypeError: ("'x' must be <class 'int'> (got '128' that is a <class 'str'>).", Attribute(name='x', default=NOTHING, validator=[<instance_of validator for type <class 'int'>>, <function fits_byte at 0x10fd7a0d0>], repr=True, cmp=True, hash=True, init=True, metadata=mappingproxy({}), type=None, converter=one), <class 'int'>, '128')
+ >>> C(256)
+ Traceback (most recent call last):
+ ...
+ ValueError: value out of bounds
+
+And finally you can disable validators globally:
+
+ >>> attrs.validators.set_disabled(True)
+ >>> C("128")
+ C(x='128')
+ >>> attrs.validators.set_disabled(False)
+ >>> C("128")
+ Traceback (most recent call last):
+ ...
+ TypeError: ("'x' must be <class 'int'> (got '128' that is a <class 'str'>).", Attribute(name='x', default=NOTHING, validator=[<instance_of validator for type <class 'int'>>, <function fits_byte at 0x10fd7a0d0>], repr=True, cmp=True, hash=True, init=True, metadata=mappingproxy({}), type=None, converter=None), <class 'int'>, '128')
+
+You can achieve the same by using the context manager:
+
+ >>> with attrs.validators.disabled():
+ ... C("128")
+ C(x='128')
+ >>> C("128")
+ Traceback (most recent call last):
+ ...
+ TypeError: ("'x' must be <class 'int'> (got '128' that is a <class 'str'>).", Attribute(name='x', default=NOTHING, validator=[<instance_of validator for type <class 'int'>>, <function fits_byte at 0x10fd7a0d0>], repr=True, cmp=True, hash=True, init=True, metadata=mappingproxy({}), type=None, converter=None), <class 'int'>, '128')
+
+
+.. _converters:
+
+Converters
+----------
+
+Finally, sometimes you may want to normalize the values coming in.
+For that ``attrs`` comes with converters.
+
+Attributes can have a ``converter`` function specified, which will be called with the attribute's passed-in value to get a new value to use.
+This can be useful for doing type-conversions on values that you don't want to force your callers to do.
+
+.. doctest::
+
+ >>> @define
+ ... class C:
+ ... x = field(converter=int)
+ >>> o = C("1")
+ >>> o.x
+ 1
+
+Converters are run *before* validators, so you can use validators to check the final form of the value.
+
+.. doctest::
+
+ >>> def validate_x(instance, attribute, value):
+ ... if value < 0:
+ ... raise ValueError("x must be at least 0.")
+ >>> @define
+ ... class C:
+ ... x = field(converter=int, validator=validate_x)
+ >>> o = C("0")
+ >>> o.x
+ 0
+ >>> C("-1")
+ Traceback (most recent call last):
+ ...
+ ValueError: x must be at least 0.
+
+
+Arguably, you can abuse converters as one-argument validators:
+
+.. doctest::
+
+ >>> C("x")
+ Traceback (most recent call last):
+ ...
+ ValueError: invalid literal for int() with base 10: 'x'
+
+
+If a converter's first argument has a type annotation, that type will appear in the signature for ``__init__``.
+A converter will override an explicit type annotation or ``type`` argument.
+
+.. doctest::
+
+ >>> def str2int(x: str) -> int:
+ ... return int(x)
+ >>> @define
+ ... class C:
+ ... x = field(converter=str2int)
+ >>> C.__init__.__annotations__
+ {'return': None, 'x': <class 'str'>}
+
+
+Hooking Yourself Into Initialization
+------------------------------------
+
+Generally speaking, the moment you think that you need finer control over how your class is instantiated than what ``attrs`` offers, it's usually best to use a classmethod factory or to apply the `builder pattern <https://en.wikipedia.org/wiki/Builder_pattern>`_.
+
+However, sometimes you need to do that one quick thing before or after your class is initialized.
+And for that ``attrs`` offers three means:
+
+- ``__attrs_pre_init__`` is automatically detected and run *before* ``attrs`` starts initializing.
+ This is useful if you need to inject a call to ``super().__init__()``.
+- ``__attrs_post_init__`` is automatically detected and run *after* ``attrs`` is done initializing your instance.
+ This is useful if you want to derive some attribute from others or perform some kind of validation over the whole instance.
+- ``__attrs_init__`` is written and attached to your class *instead* of ``__init__``, if ``attrs`` is told to not write one (i.e. ``init=False`` or a combination of ``auto_detect=True`` and a custom ``__init__``).
+ This is useful if you want full control over the initialization process, but don't want to set the attributes by hand.
+
+
+Pre Init
+~~~~~~~~
+
+The sole reason for the existence of ``__attrs_pre_init__`` is to give users the chance to call ``super().__init__()``, because some subclassing-based APIs require that.
+
+.. doctest::
+
+ >>> @define
+ ... class C:
+ ... x: int
+ ... def __attrs_pre_init__(self):
+ ... super().__init__()
+ >>> C(42)
+ C(x=42)
+
+If you need more control, use the custom init approach described next.
+
+
+Custom Init
+~~~~~~~~~~~
+
+If you tell ``attrs`` to not write an ``__init__``, it will write an ``__attrs_init__`` instead, with the same code that it would have used for ``__init__``.
+You have full control over the initialization, but also have to type out the types of your arguments etc.
+Here's an example of a manual default value:
+
+.. doctest::
+
+ >>> from typing import Optional
+
+ >>> @define
+ ... class C:
+ ... x: int
+ ...
+ ... def __init__(self, x: int = 42):
+ ... self.__attrs_init__(x)
+ >>> C()
+ C(x=42)
+
+
+Post Init
+~~~~~~~~~
+
+.. doctest::
+
+ >>> @define
+ ... class C:
+ ... x: int
+ ... y: int = field(init=False)
+ ... def __attrs_post_init__(self):
+ ... self.y = self.x + 1
+ >>> C(1)
+ C(x=1, y=2)
+
+Please note that you can't directly set attributes on frozen classes:
+
+.. doctest::
+
+ >>> @frozen
+ ... class FrozenBroken:
+ ... x: int
+ ... y: int = field(init=False)
+ ... def __attrs_post_init__(self):
+ ... self.y = self.x + 1
+ >>> FrozenBroken(1)
+ Traceback (most recent call last):
+ ...
+ attrs.exceptions.FrozenInstanceError: can't set attribute
+
+If you need to set attributes on a frozen class, you'll have to resort to the `same trick <how-frozen>` as ``attrs`` and use :meth:`object.__setattr__`:
+
+.. doctest::
+
+ >>> @frozen
+ ... class Frozen:
+ ... x: int
+ ... y: int = field(init=False)
+ ... def __attrs_post_init__(self):
+ ... object.__setattr__(self, "y", self.x + 1)
+ >>> Frozen(1)
+ Frozen(x=1, y=2)
+
+Note that you *must not* access the hash code of the object in ``__attrs_post_init__`` if ``cache_hash=True``.
+
+
+Order of Execution
+------------------
+
+If present, the hooks are executed in the following order:
+
+1. ``__attrs_pre_init__`` (if present on *current* class)
+2. For each attribute, in the order it was declared:
+
+ a. default factory
+ b. converter
+
+3. *all* validators
+4. ``__attrs_post_init__`` (if present on *current* class)
+
+Notably this means that you can access all attributes from within your validators, but your converters have to deal with invalid values and have to return a valid value.
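+
+If you want to see the order for yourself, here is a small tracing sketch (the prints are purely illustrative)::
+
+ from attr import define, field
+
+ @define
+ class Traced:
+     x: int = field(
+         converter=lambda v: print("2. converter") or v,
+         validator=lambda self, a, v: print("3. validator"),
+     )
+
+     def __attrs_pre_init__(self):
+         print("1. pre init")
+
+     def __attrs_post_init__(self):
+         print("4. post init")
+
+ Traced(1)  # prints the four steps in order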
+
+
+Derived Attributes
+------------------
+
+One of the most common ``attrs`` questions on *Stack Overflow* is how to have attributes that depend on other attributes.
+For example if you have an API token and want to instantiate a web client that uses it for authentication.
+Based on the previous sections, there are two approaches.
+
+The simpler one is using ``__attrs_post_init__``::
+
+ @define
+ class APIClient:
+ token: str
+ client: WebClient = field(init=False)
+
+ def __attrs_post_init__(self):
+ self.client = WebClient(self.token)
+
+The second one is using a decorator-based default::
+
+ @define
+ class APIClient:
+ token: str
+ client: WebClient = field() # needed! attr.ib works too
+
+ @client.default
+ def _client_factory(self):
+ return WebClient(self.token)
+
+That said, and as pointed out in the beginning of the chapter, a better approach would be to have a factory class method::
+
+ @define
+ class APIClient:
+ client: WebClient
+
+ @classmethod
+ def from_token(cls, token: str) -> "APIClient":
+ return cls(client=WebClient(token))
+
+This makes the class more testable.
+
+
+.. _`Wiki page`: https://github.com/python-attrs/attrs/wiki/Extensions-to-attrs
+.. _`get confused`: https://github.com/python-attrs/attrs/issues/289
+.. _`there is no such thing as a private argument`: https://github.com/hynek/characteristic/issues/6
diff --git a/testing/web-platform/tests/tools/third_party/attrs/docs/license.rst b/testing/web-platform/tests/tools/third_party/attrs/docs/license.rst
new file mode 100644
index 0000000000..a341a31eb9
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/docs/license.rst
@@ -0,0 +1,8 @@
+===================
+License and Credits
+===================
+
+``attrs`` is licensed under the `MIT <https://choosealicense.com/licenses/mit/>`_ license.
+The full license text can be also found in the `source code repository <https://github.com/python-attrs/attrs/blob/main/LICENSE>`_.
+
+.. include:: ../AUTHORS.rst
diff --git a/testing/web-platform/tests/tools/third_party/attrs/docs/names.rst b/testing/web-platform/tests/tools/third_party/attrs/docs/names.rst
new file mode 100644
index 0000000000..0fe953e6a5
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/docs/names.rst
@@ -0,0 +1,122 @@
+On The Core API Names
+=====================
+
+You may be surprised seeing ``attrs`` classes being created using `attrs.define` and with type annotated fields, instead of `attr.s` and `attr.ib()`.
+
+Or, you wonder why the web and talks are full of this weird `attr.s` and `attr.ib` -- including people having strong opinions about it and using ``attr.attrs`` and ``attr.attrib`` instead.
+
+And what even is ``attr.dataclass`` that's not documented but commonly used!?
+
+
+TL;DR
+-----
+
+We recommend our modern APIs for new code:
+
+- `attrs.define()` to define a new class,
+- `attrs.mutable()` is an alias for `attrs.define()`,
+- `attrs.frozen()` is an alias for ``define(frozen=True)``
+- and `attrs.field()` to define an attribute.
+
+They have been added in ``attrs`` 20.1.0, they are expressive, and they have modern defaults like slots and type annotation awareness switched on by default.
+They are only available in Python 3.6 and later.
+Sometimes they're referred to as *next-generation* or *NG* APIs.
+As of ``attrs`` 21.3.0 you can also import them from the ``attrs`` package namespace.
+
+The traditional APIs `attr.s` / `attr.ib`, their serious business aliases ``attr.attrs`` / ``attr.attrib``, and the never-documented, but popular ``attr.dataclass`` easter egg will stay **forever**.
+
+``attrs`` will **never** force you to use type annotations.
+
+
+A Short History Lesson
+----------------------
+
+At this point, ``attrs`` is an old project.
+It had its first release in April 2015 -- back when most Python code was on Python 2.7 and Python 3.4 was the first Python 3 release that showed promise.
+``attrs`` was always Python 3-first, but `type annotations <https://www.python.org/dev/peps/pep-0484/>`_ came only into Python 3.5 that was released in September 2015 and were largely ignored until years later.
+
+At this time, if you didn't want to implement all the :term:`dunder methods`, the most common way to create a class with some attributes on it was to subclass a `collections.namedtuple`, or one of the many hacks that allowed you to access dictionary keys using attribute lookup.
+
+But ``attrs`` history goes even a bit further back, to the now-forgotten `characteristic <https://github.com/hynek/characteristic>`_ that came out in May 2014 and already used a class decorator, but was overall too unergonomic.
+
+In the wake of all of that, `glyph <https://twitter.com/glyph>`_ and `Hynek <https://twitter.com/hynek>`_ came together on IRC and brainstormed how to take the good ideas of ``characteristic``, but make them easier to use and read.
+At this point the plan was not to make ``attrs`` what it is now -- a flexible class building kit.
+All we wanted was an ergonomic little library to succinctly define classes with attributes.
+
+Under the impression of the unwieldy ``characteristic`` name, we went to the other side and decided to make the package name part of the API, and keep the API functions very short.
+This led to the infamous `attr.s` and `attr.ib`, which some found confusing and pronounced as "attr dot s", or used a singular ``@s`` as the decorator.
+But it was really just a way to say ``attrs`` and ``attrib``\ [#attr]_.
+
+Some people hated this cutesy API from day one, which is why we added aliases for them that we called *serious business*: ``@attr.attrs`` and ``attr.attrib()``.
+Fans of them usually imported the names and didn't use the package name in the first place.
+Unfortunately, the ``attr`` package name started creaking the moment we added ``attr.Factory``, since it couldn’t be morphed into something meaningful in any way.
+A problem that grew worse over time, as more APIs and even modules were added.
+
+But overall, ``attrs`` in this shape was a **huge** success -- especially after glyph's blog post `The One Python Library Everyone Needs <https://glyph.twistedmatrix.com/2016/08/attrs.html>`_ in August 2016 and `pytest <https://docs.pytest.org/>`_ adopting it.
+
+Being able to just write::
+
+ @attr.s
+ class Point(object):
+ x = attr.ib()
+ y = attr.ib()
+
+was a big step for those who wanted to write small, focused classes.
+
+
+Dataclasses Enter The Arena
+^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+A big change happened in May 2017 when Hynek sat down with `Guido van Rossum <https://en.wikipedia.org/wiki/Guido_van_Rossum>`_ and `Eric V. Smith <https://github.com/ericvsmith>`_ at PyCon US 2017.
+
+Type annotations for class attributes had `just landed <https://www.python.org/dev/peps/pep-0526/>`_ in Python 3.6, and Guido felt like it would be a good mechanism to introduce something similar to ``attrs`` to the Python standard library.
+The result, of course, was `PEP 557 <https://www.python.org/dev/peps/pep-0557/>`_\ [#stdlib]_ which eventually became the `dataclasses` module in Python 3.7.
+
+``attrs`` at this point was lucky to have several people on board who were also very excited about type annotations and helped implement it, including a `Mypy plugin <https://medium.com/@Pilot-EPD-Blog/mypy-and-attrs-e1b0225e9ac6>`_.
+And so it happened that ``attrs`` `shipped <https://www.attrs.org/en/17.3.0.post2/changelog.html>`_ the new method of defining classes more than half a year before Python 3.7 -- and thus `dataclasses` -- were released.
+
+-----
+
+Due to backward-compatibility concerns, though, this feature is off by default in the `attr.s` decorator and has to be activated using ``@attr.s(auto_attribs=True)``.
+As a little easter egg and to save ourselves some typing, we've also `added <https://github.com/python-attrs/attrs/commit/88aa1c897dfe2ee4aa987e4a56f2ba1344a17238#diff-4fc63db1f2fcb7c6e464ee9a77c3c74e90dd191d1c9ffc3bdd1234d3a6663dc0R48>`_ an alias called ``attr.dataclass`` that simply sets ``auto_attribs=True``.
+It was never documented, but people found it and used it and loved it.
+
+Over the next months and years it became clear that type annotations have become the popular way to define classes and their attributes.
+However, it has also become clear that some people viscerally hate type annotations.
+We're determined to serve both.
+
+
+``attrs`` TNG
+^^^^^^^^^^^^^
+
+Over its existence, ``attrs`` never stood still.
+But since we also greatly care about backward compatibility and not breaking our users' code, many features and niceties have to be manually activated.
+
+That is not only annoying, it also leads to the problem that many of ``attrs``'s users don't even know what it can do for them.
+We've spent years alone explaining that defining attributes using type annotations is in no way unique to `dataclasses`.
+
+Finally we've decided to take the `Go route <https://go.dev/blog/module-compatibility>`_:
+instead of fiddling with the old APIs -- whose names felt anachronistic anyway -- we'd define new ones, with better defaults.
+So in July 2018, we `looked for better names <https://github.com/python-attrs/attrs/issues/408>`_ and came up with `attr.define`, `attr.field`, and friends.
+Then in January 2019, we `started looking for inconvenient defaults <https://github.com/python-attrs/attrs/issues/487>`_ that we now could fix without any repercussions.
+
+These APIs proved to be very popular, so we've finally changed the documentation to them in November of 2021.
+
+All of this took way too long, of course.
+One reason is the COVID-19 pandemic, but also our fear of fumbling this historic chance to fix our APIs.
+
+Finally, in December 2021, we've added the ``attrs`` package namespace.
+
+We hope you like the result::
+
+ from attrs import define
+
+ @define
+ class Point:
+ x: int
+ y: int
+
+
+.. [#attr] We considered calling the PyPI package just ``attr`` too, but the name was already taken by an *ostensibly* inactive `package on PyPI <https://pypi.org/project/attr/#history>`_.
+.. [#stdlib] The highly readable PEP also explains why ``attrs`` wasn't just added to the standard library.
+ Don't believe the myths and rumors.
diff --git a/testing/web-platform/tests/tools/third_party/attrs/docs/overview.rst b/testing/web-platform/tests/tools/third_party/attrs/docs/overview.rst
new file mode 100644
index 0000000000..b35f66f2dd
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/docs/overview.rst
@@ -0,0 +1,58 @@
+========
+Overview
+========
+
+In order to fulfill its ambitious goal of bringing back the joy to writing classes, ``attrs`` gives you a class decorator and a way to declaratively define the attributes on that class:
+
+.. include:: ../README.rst
+ :start-after: -code-begin-
+ :end-before: -getting-help-
+
+
+.. _philosophy:
+
+Philosophy
+==========
+
+**It's about regular classes.**
+ ``attrs`` is for creating well-behaved classes with a type, attributes, methods, and everything that comes with a class.
+ It can be used for data-only containers like ``namedtuple``\ s or ``types.SimpleNamespace`` but they're just a sub-genre of what ``attrs`` is good for.
+
+**The class belongs to the users.**
+ You define a class and ``attrs`` adds static methods to that class based on the attributes you declare.
+ The end.
+ It doesn't add metaclasses.
+ It doesn't add classes you've never heard of to your inheritance tree.
+ An ``attrs`` class at runtime is indistinguishable from a regular class, because it *is* a regular class with a few boilerplate-y methods attached.
+
+**Be light on API impact.**
+ As convenient as it seems at first, ``attrs`` will *not* tack on any methods to your classes except for the :term:`dunder ones <dunder methods>`.
+ Hence all the useful `tools <helpers>` that come with ``attrs`` live in functions that operate on top of instances.
+ Since they take an ``attrs`` instance as their first argument, you can attach them to your classes with one line of code (see the sketch after this list).
+
+**Performance matters.**
+ ``attrs`` runtime impact is very close to zero because all the work is done when the class is defined.
+ Once you're instantiating it, ``attrs`` is out of the picture completely.
+
+**No surprises.**
+ ``attrs`` creates classes that arguably work the way a Python beginner would reasonably expect them to work.
+ It doesn't try to guess what you mean because explicit is better than implicit.
+ It doesn't try to be clever because software shouldn't be clever.
+
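+To make the *Be light on API impact* point concrete, here is a minimal sketch; the ``as_dict`` name is just an example::
+
+    import attr
+
+    @attr.s(auto_attribs=True)
+    class Point:
+        x: int
+        y: int
+
+    # Helpers are plain functions that take the instance as their first argument ...
+    attr.asdict(Point(1, 2))   # {'x': 1, 'y': 2}
+
+    # ... so attaching one to the class is a single line, if you want that.
+    Point.as_dict = attr.asdict
+    Point(1, 2).as_dict()      # {'x': 1, 'y': 2}
+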
+Check out `how-does-it-work` if you'd like to know how it achieves all of the above.
+
+
+What ``attrs`` Is Not
+=====================
+
+``attrs`` does *not* invent some kind of magic system that pulls classes out of its hat using meta classes, runtime introspection, and shaky interdependencies.
+
+All ``attrs`` does is:
+
+1. take your declaration,
+2. write :term:`dunder methods` based on that information,
+3. and attach them to your class.
+
+It does *nothing* dynamic at runtime, hence zero runtime overhead.
+It's still *your* class.
+Do with it as you please.
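+
+A quick sanity check along those lines (a sketch, nothing more)::
+
+    import attr
+
+    @attr.s(auto_attribs=True)
+    class Point:
+        x: int
+        y: int
+
+    # No metaclass, no injected base classes, just ordinary methods:
+    type(Point)      # plain type
+    Point.__mro__    # only Point and object
+    Point.__eq__     # an ordinary function that attrs wrote for you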
diff --git a/testing/web-platform/tests/tools/third_party/attrs/docs/python-2.rst b/testing/web-platform/tests/tools/third_party/attrs/docs/python-2.rst
new file mode 100644
index 0000000000..7ec9e5112c
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/docs/python-2.rst
@@ -0,0 +1,25 @@
+Python 2 Statement
+==================
+
+While ``attrs`` has always been a Python 3-first package, we the maintainers are aware that Python 2 has not magically disappeared in 2020.
+We are also aware that ``attrs`` is an important building block in many people's systems and livelihoods.
+
+As such, we do **not** have any immediate plans to drop Python 2 support in ``attrs``.
+We intend to support it for as long as it is technically feasible for us.
+
+Feasibility in this case means:
+
+1. Possibility to run the tests on our development computers,
+2. and **free** CI options.
+
+This can mean that we will have to run our tests on PyPy, whose maintainers have unequivocally declared that they do not intend to stop the development and maintenance of their Python 2-compatible line at all.
+And this can mean that at some point, a sponsor will have to step up and pay for bespoke CI setups.
+
+**However**: there is no promise of new features coming to ``attrs`` running under Python 2.
+It is up to our discretion alone to decide whether the introduced complexity or awkwardness is worth it, or whether we choose to make a feature available on modern platforms only.
+
+
+Summary
+-------
+
+We will do our best to support existing users, but nobody is entitled to the latest and greatest features on a platform that is officially end of life.
diff --git a/testing/web-platform/tests/tools/third_party/attrs/docs/types.rst b/testing/web-platform/tests/tools/third_party/attrs/docs/types.rst
new file mode 100644
index 0000000000..fbb90a7e93
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/docs/types.rst
@@ -0,0 +1,108 @@
+Type Annotations
+================
+
+``attrs`` comes with first-class support for type annotations, both in the Python 3.6 style (:pep:`526`) and the legacy syntax.
+
+However, they will forever remain *optional*; therefore the example from the README could also be written as:
+
+.. doctest::
+
+ >>> from attrs import define, field
+
+ >>> @define
+ ... class SomeClass:
+ ... a_number = field(default=42)
+ ... list_of_numbers = field(factory=list)
+
+ >>> sc = SomeClass(1, [1, 2, 3])
+ >>> sc
+ SomeClass(a_number=1, list_of_numbers=[1, 2, 3])
+
+You can choose freely between the approaches, but please remember that if you choose to use type annotations, you **must** annotate **all** attributes!
+
+----
+
+Even when going all-in on type annotations, you will still need `attr.field` for some advanced features.
+
+Among those features are the decorator-based ones, such as defaults.
+It's important to remember that ``attrs`` doesn't do any magic behind your back.
+All the decorators are implemented using an object that is returned by the call to `attrs.field`.
+
+Attributes that only carry a class annotation do not have that object, so trying to call a method on it will inevitably fail.
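+
+A minimal sketch of what that means in practice (the names are made up)::
+
+    from attrs import define, field
+
+    @define
+    class Timespan:
+        start: int
+        end: int = field()
+
+        # Works: ``end`` was created with field(), so it carries the object
+        # that provides the default decorator.
+        @end.default
+        def _end_default(self):
+            return self.start + 1
+
+        # ``start`` on the other hand is only an annotation; no object is
+        # bound to the name, so ``@start.default`` would be a NameError.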
+
+*****
+
+Please note that types -- however added -- are *only metadata* that can be queried from the class and they aren't used for anything out of the box!
+
+Because Python does not allow references to a class object before the class is defined,
+types may be defined as string literals, so-called *forward references* (:pep:`526`).
+You can enable this automatically for a whole module by using ``from __future__ import annotations`` (:pep:`563`) as of Python 3.7.
+In this case ``attrs`` simply puts these string literals into the ``type`` attributes.
+If you need to resolve these to real types, you can call `attrs.resolve_types` which will update the attribute in place.
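+
+A small sketch of that round trip, assuming the class lives at module level::
+
+    from attrs import define, fields, resolve_types
+
+    @define
+    class Node:
+        value: int
+        parent: "Node" = None
+
+    fields(Node).parent.type   # the string 'Node'
+    resolve_types(Node)
+    fields(Node).parent.type   # now the class Node itself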
+
+In practice though, types show their biggest usefulness in combination with tools like mypy_, pytype_, or pyright_ that have dedicated support for ``attrs`` classes.
+
+The addition of static types is certainly one of the most exciting features in the Python ecosystem and helps you write *correct* and *verified self-documenting* code.
+
+If you don't know where to start, Carl Meyer gave a great talk on `Type-checked Python in the Real World <https://www.youtube.com/watch?v=pMgmKJyWKn8>`_ at PyCon US 2018 that will help you to get started in no time.
+
+
+mypy
+----
+
+While having a nice syntax for type metadata is great, it's even greater that mypy_ as of 0.570 ships with a dedicated ``attrs`` plugin which allows you to statically check your code.
+
+Imagine you add another line that tries to instantiate the defined class using ``SomeClass("23")``.
+Mypy will catch that error for you:
+
+.. code-block:: console
+
+ $ mypy t.py
+ t.py:12: error: Argument 1 to "SomeClass" has incompatible type "str"; expected "int"
+
+This happens *without* running your code!
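+
+For reference, the checked file could look roughly like this; ``t.py`` and the exact line number are assumptions of this sketch::
+
+    from attrs import define, field
+
+    @define
+    class SomeClass:
+        a_number: int = 42
+        list_of_numbers: list[int] = field(factory=list)
+
+    SomeClass("23")  # mypy flags this: "str" is not an "int"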
+
+And it also works with *both* Python 2-style annotation approaches: type comments and the ``type`` argument.
+To mypy, this code is equivalent to the one above:
+
+.. code-block:: python
+
+ @attr.s
+ class SomeClass(object):
+ a_number = attr.ib(default=42) # type: int
+ list_of_numbers = attr.ib(factory=list, type=list[int])
+
+
+pyright
+-------
+
+``attrs`` provides support for pyright_ through the dataclass_transform_ specification.
+This provides static type inference for a subset of ``attrs`` equivalent to standard-library ``dataclasses``,
+and requires explicit type annotations using the `attrs.define` or ``@attr.s(auto_attribs=True)`` API.
+
+Given the following definition, ``pyright`` will generate static type signatures for ``SomeClass`` attribute access, ``__init__``, ``__eq__``, and comparison methods::
+
+ @attr.define
+ class SomeClass:
+ a_number: int = 42
+ list_of_numbers: list[int] = attr.field(factory=list)
+
+.. warning::
+
+ The ``pyright`` inferred types are a subset of those supported by ``mypy``, including:
+
+ - The generated ``__init__`` signature only includes the attribute type annotations.
+ It currently does not include attribute ``converter`` types.
+
+ - The ``attr.frozen`` decorator is not typed with frozen attributes, which are properly typed via ``attr.define(frozen=True)``.
+
+ A `full list <https://github.com/microsoft/pyright/blob/main/specs/dataclass_transforms.md#attrs>`_ of limitations and incompatibilities can be found in pyright's repository.
+
+ Your constructive feedback is welcome in both `attrs#795 <https://github.com/python-attrs/attrs/issues/795>`_ and `pyright#1782 <https://github.com/microsoft/pyright/discussions/1782>`_.
+ Generally speaking, the decision on improving ``attrs`` support in pyright is entirely Microsoft's prerogative though.
+
+
+.. _mypy: http://mypy-lang.org
+.. _pytype: https://google.github.io/pytype/
+.. _pyright: https://github.com/microsoft/pyright
+.. _dataclass_transform: https://github.com/microsoft/pyright/blob/main/specs/dataclass_transforms.md
diff --git a/testing/web-platform/tests/tools/third_party/attrs/docs/why.rst b/testing/web-platform/tests/tools/third_party/attrs/docs/why.rst
new file mode 100644
index 0000000000..2c0ca4cd66
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/docs/why.rst
@@ -0,0 +1,290 @@
+Why not…
+========
+
+
+If you'd like a third party's account of why ``attrs`` is great, have a look at Glyph's `The One Python Library Everyone Needs <https://glyph.twistedmatrix.com/2016/08/attrs.html>`_!
+
+
+…Data Classes?
+--------------
+
+:pep:`557` added Data Classes to `Python 3.7 <https://docs.python.org/3.7/whatsnew/3.7.html#dataclasses>`_ that resemble ``attrs`` in many ways.
+
+They are the result of the Python community's `wish <https://mail.python.org/pipermail/python-ideas/2017-May/045618.html>`_ to have an easier way to write classes in the standard library that doesn't carry the problems of ``namedtuple``\ s.
+To that end, ``attrs`` and its developers were involved in the PEP process and while we may disagree with some minor decisions that have been made, it's a fine library and if it stops you from abusing ``namedtuple``\ s, they are a huge win.
+
+Nevertheless, there are still reasons to prefer ``attrs`` over Data Classes.
+Whether they're relevant to *you* depends on your circumstances:
+
+- Data Classes are *intentionally* less powerful than ``attrs``.
+ There is a long list of features that were sacrificed for the sake of simplicity and while the most obvious ones are validators, converters, :ref:`equality customization <custom-comparison>`, or :doc:`extensibility <extending>` in general, the difference permeates all of the APIs.
+
+ On the other hand, Data Classes currently do not offer any significant feature that ``attrs`` doesn't already have.
+- ``attrs`` supports all mainstream Python versions, including CPython 2.7 and PyPy.
+- ``attrs`` doesn't force type annotations on you if you don't like them.
+- But since it **also** supports typing, it's the best way to embrace type hints *gradually*, too.
+- While Data Classes are implementing features from ``attrs`` every now and then, their presence is dependent on the Python version, not the package version.
+ For example, support for ``__slots__`` has only been added in Python 3.10.
+ That is especially painful for PyPI packages that support multiple Python versions.
+ This includes possible implementation bugs.
+- ``attrs`` can and will move faster.
+ We are not bound to any release schedules and we have a clear deprecation policy.
+
+ One of the `reasons <https://www.python.org/dev/peps/pep-0557/#why-not-just-use-attrs>`_ to not vendor ``attrs`` in the standard library was to not impede ``attrs``'s future development.
+
+One way to think about ``attrs`` vs Data Classes is that ``attrs`` is a fully-fledged toolkit to write powerful classes while Data Classes are an easy way to get a class with some attributes.
+Basically what ``attrs`` was in 2015.
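+
+To make the feature gap concrete, here is a minimal sketch of validators and converters, two of the sacrificed features mentioned above::
+
+    from attrs import define, field, validators
+
+    @define
+    class Server:
+        # The converter runs first, then the validator; neither exists in Data Classes.
+        port: int = field(converter=int, validator=validators.ge(1))
+        name: str = field(default="db", validator=validators.instance_of(str))
+
+    Server("8080")   # the converter turns "8080" into the int 8080
+    Server(0)        # raises ValueError because 0 is not >= 1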
+
+
+…pydantic?
+----------
+
+*pydantic* is first and foremost a *data validation library*.
+As such, it is a capable complement to class building libraries like ``attrs`` (or Data Classes!) for parsing and validating untrusted data.
+
+However, as convenient as it might be, using it for your business or data layer `is problematic in several ways <https://threeofwands.com/why-i-use-attrs-instead-of-pydantic/>`_:
+Is it really necessary to re-validate all your objects while reading them from a trusted database?
+In the parlance of `Form, Command, and Model Validation <https://verraes.net/2015/02/form-command-model-validation/>`_, *pydantic* is the right tool for *Commands*.
+
+`Separation of concerns <https://en.wikipedia.org/wiki/Separation_of_concerns>`_ feels tedious at times, but it's one of those things that you get to appreciate once you've shot your own foot often enough.
+
+
+…namedtuples?
+-------------
+
+`collections.namedtuple`\ s are tuples with names, not classes. [#history]_
+Since writing classes is tiresome in Python, every now and then someone discovers all the typing they could save and gets really excited.
+However, that convenience comes at a price.
+
+The most obvious difference between ``namedtuple``\ s and ``attrs``-based classes is that the latter are type-sensitive:
+
+.. doctest::
+
+ >>> import attr
+ >>> C1 = attr.make_class("C1", ["a"])
+ >>> C2 = attr.make_class("C2", ["a"])
+ >>> i1 = C1(1)
+ >>> i2 = C2(1)
+ >>> i1.a == i2.a
+ True
+ >>> i1 == i2
+ False
+
+…while a ``namedtuple`` is *intentionally* `behaving like a tuple`_ which means the type of a tuple is *ignored*:
+
+.. doctest::
+
+ >>> from collections import namedtuple
+ >>> NT1 = namedtuple("NT1", "a")
+ >>> NT2 = namedtuple("NT2", "b")
+ >>> t1 = NT1(1)
+ >>> t2 = NT2(1)
+ >>> t1 == t2 == (1,)
+ True
+
+Other often surprising behaviors include:
+
+- Since they are a subclass of tuples, ``namedtuple``\ s have a length and are both iterable and indexable.
+ That's not what you'd expect from a class and is likely to shadow subtle typo bugs.
+- Iterability also implies that it's easy to accidentally unpack a ``namedtuple`` which leads to hard-to-find bugs. [#iter]_
+- ``namedtuple``\ s have their methods *on your instances* whether you like it or not. [#pollution]_
+- ``namedtuple``\ s are *always* immutable.
+ Not only does that mean that you can't decide for yourself whether your instances should be immutable or not, it also means that if you want to influence your class' initialization (validation? default values?), you have to implement :meth:`__new__() <object.__new__>` which is a particularly hacky and error-prone requirement for a very common problem. [#immutable]_
+- To attach methods to a ``namedtuple`` you have to subclass it.
+ And if you follow the standard library documentation's recommendation of::
+
+ class Point(namedtuple('Point', ['x', 'y'])):
+ # ...
+
+ you end up with a class that has *two* ``Point``\ s in its :attr:`__mro__ <class.__mro__>`: ``[<class 'point.Point'>, <class 'point.Point'>, <type 'tuple'>, <type 'object'>]``.
+
+ That's not only confusing, it also has very practical consequences:
+ for example if you create documentation that includes class hierarchies like `Sphinx's autodoc <https://www.sphinx-doc.org/en/stable/usage/extensions/autodoc.html>`_ with ``show-inheritance``.
+ Again: common problem, hacky solution with confusing fallout.
+
+All these things make ``namedtuple``\ s a particularly poor choice for public APIs because all your objects are irrevocably tainted.
+With ``attrs`` your users won't notice a difference because it creates regular, well-behaved classes.
+
+.. admonition:: Summary
+
+ If you want a *tuple with names*, by all means: go for a ``namedtuple``. [#perf]_
+ But if you want a class with methods, you're doing yourself a disservice by relying on a pile of hacks that requires you to employ even more hacks as your requirements expand.
+
+ Other than that, ``attrs`` also adds nifty features like validators, converters, and (mutable!) default values.
+
+
+.. rubric:: Footnotes
+
+.. [#history] The word is that ``namedtuple``\ s were added to the Python standard library as a way to make tuples in return values more readable.
+ And indeed that is something you see throughout the standard library.
+
+ Looking at what the makers of ``namedtuple``\ s use it for themselves is a good guideline for deciding on your own use cases.
+.. [#pollution] ``attrs`` only adds a single attribute: ``__attrs_attrs__`` for introspection.
+ All helpers are functions in the ``attr`` package.
+ Since they take the instance as first argument, you can easily attach them to your classes under a name of your own choice.
+.. [#iter] `attr.astuple` can be used to get that behavior in ``attrs`` on *explicit demand*.
+.. [#immutable] ``attrs`` offers *optional* immutability through the ``frozen`` keyword.
+.. [#perf] Although ``attrs`` would serve you just as well!
+ Since both employ the same method of writing and compiling Python code for you, the performance penalty is negligible at worst and in some cases ``attrs`` is even faster if you use ``slots=True`` (which is generally a good idea anyway).
+
+.. _behaving like a tuple: https://docs.python.org/3/tutorial/datastructures.html#tuples-and-sequences
+
+
+…tuples?
+--------
+
+Readability
+^^^^^^^^^^^
+
+What makes more sense while debugging::
+
+ Point(x=1, y=2)
+
+or::
+
+ (1, 2)
+
+?
+
+Let's add even more ambiguity::
+
+ Customer(id=42, reseller=23, first_name="Jane", last_name="John")
+
+or::
+
+ (42, 23, "Jane", "John")
+
+?
+
+Why would you want to write ``customer[2]`` instead of ``customer.first_name``?
+
+Don't get me started when you add nesting.
+If you've never run into mysterious tuples you had no idea what the hell they meant while debugging, you're much smarter than yours truly.
+
+Using proper classes with names and types makes program code much more readable and comprehensible_.
+Especially when trying to grok a new piece of software or returning to old code after several months.
+
+.. _comprehensible: https://arxiv.org/pdf/1304.5257.pdf
+
+
+Extendability
+^^^^^^^^^^^^^
+
+Imagine you have a function that takes or returns a tuple.
+Especially if you use tuple unpacking (e.g. ``x, y = get_point()``), adding additional data means that you have to change the invocation of that function *everywhere*.
+
+Adding an attribute to a class concerns only those who actually care about that attribute.
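+
+A hypothetical ``get_point`` makes the difference visible; callers that access fields by name keep working when a new attribute shows up later::
+
+    import attr
+
+    @attr.s(auto_attribs=True)
+    class Point:
+        x: int
+        y: int
+        # adding ``z: int = 0`` later breaks nobody below
+
+    def get_point():
+        return Point(x=1, y=2)
+
+    p = get_point()
+    print(p.x, p.y)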
+
+
+…dicts?
+-------
+
+Dictionaries are not for fixed fields.
+
+If you have a dict, it maps something to something else.
+You should be able to add and remove values.
+
+``attrs`` lets you be specific about those expectations; a dictionary does not.
+It gives you a named entity (the class) in your code, which lets you explain in other places whether you take a parameter of that class or return a value of that class.
+
+In other words: if your dict has a fixed and known set of keys, it is an object, not a hash.
+So if you never iterate over the keys of a dict, you should use a proper class.
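+
+A rough rule of thumb, sketched out::
+
+    import attr
+
+    # A mapping: the keys vary at runtime and you iterate over them.
+    word_counts = {"spam": 3, "eggs": 1}
+
+    # Fixed, known fields: a class states that expectation explicitly.
+    @attr.s(auto_attribs=True)
+    class Customer:
+        id: int
+        first_name: str
+
+    c = Customer(id=42, first_name="Jane")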
+
+
+…hand-written classes?
+----------------------
+
+While we're fans of all things artisanal, writing the same nine methods again and again doesn't qualify.
+I usually manage to get some typos inside and there's simply more code that can break and thus has to be tested.
+
+To bring it into perspective, the equivalent of
+
+.. doctest::
+
+ >>> @attr.s
+ ... class SmartClass(object):
+ ... a = attr.ib()
+ ... b = attr.ib()
+ >>> SmartClass(1, 2)
+ SmartClass(a=1, b=2)
+
+is roughly
+
+.. doctest::
+
+ >>> class ArtisanalClass(object):
+ ... def __init__(self, a, b):
+ ... self.a = a
+ ... self.b = b
+ ...
+ ... def __repr__(self):
+ ... return "ArtisanalClass(a={}, b={})".format(self.a, self.b)
+ ...
+ ... def __eq__(self, other):
+ ... if other.__class__ is self.__class__:
+ ... return (self.a, self.b) == (other.a, other.b)
+ ... else:
+ ... return NotImplemented
+ ...
+ ... def __ne__(self, other):
+ ... result = self.__eq__(other)
+ ... if result is NotImplemented:
+ ... return NotImplemented
+ ... else:
+ ... return not result
+ ...
+ ... def __lt__(self, other):
+ ... if other.__class__ is self.__class__:
+ ... return (self.a, self.b) < (other.a, other.b)
+ ... else:
+ ... return NotImplemented
+ ...
+ ... def __le__(self, other):
+ ... if other.__class__ is self.__class__:
+ ... return (self.a, self.b) <= (other.a, other.b)
+ ... else:
+ ... return NotImplemented
+ ...
+ ... def __gt__(self, other):
+ ... if other.__class__ is self.__class__:
+ ... return (self.a, self.b) > (other.a, other.b)
+ ... else:
+ ... return NotImplemented
+ ...
+ ... def __ge__(self, other):
+ ... if other.__class__ is self.__class__:
+ ... return (self.a, self.b) >= (other.a, other.b)
+ ... else:
+ ... return NotImplemented
+ ...
+ ... def __hash__(self):
+ ... return hash((self.__class__, self.a, self.b))
+ >>> ArtisanalClass(a=1, b=2)
+ ArtisanalClass(a=1, b=2)
+
+which is quite a mouthful and it doesn't even use any of ``attrs``'s more advanced features like validators or default values.
+Also: no tests whatsoever.
+And who will guarantee you that you don't accidentally flip the ``<`` in your tenth implementation of ``__gt__``?
+
+It also should be noted that ``attrs`` is not an all-or-nothing solution.
+You can freely choose which features you want and disable those that you want more control over:
+
+.. doctest::
+
+ >>> @attr.s(repr=False)
+ ... class SmartClass(object):
+ ... a = attr.ib()
+ ... b = attr.ib()
+ ...
+ ... def __repr__(self):
+ ... return "<SmartClass(a=%d)>" % (self.a,)
+ >>> SmartClass(1, 2)
+ <SmartClass(a=1)>
+
+.. admonition:: Summary
+
+ If you don't care and like typing, we're not gonna stop you.
+
+ However it takes a lot of bias and determined rationalization to claim that ``attrs`` raises the mental burden on a project given how difficult it is to find the important bits in a hand-written class and how annoying it is to ensure you've copy-pasted your code correctly over all your classes.
+
+ In any case, if you ever get sick of the repetitiveness and drowning important code in a sea of boilerplate, ``attrs`` will be waiting for you.
diff --git a/testing/web-platform/tests/tools/third_party/attrs/mypy.ini b/testing/web-platform/tests/tools/third_party/attrs/mypy.ini
new file mode 100644
index 0000000000..685c02599f
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/mypy.ini
@@ -0,0 +1,3 @@
+[mypy]
+disallow_untyped_defs = True
+check_untyped_defs = True
diff --git a/testing/web-platform/tests/tools/third_party/attrs/pyproject.toml b/testing/web-platform/tests/tools/third_party/attrs/pyproject.toml
new file mode 100644
index 0000000000..52c0e49ec2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/pyproject.toml
@@ -0,0 +1,71 @@
+[build-system]
+requires = ["setuptools>=40.6.0", "wheel"]
+build-backend = "setuptools.build_meta"
+
+
+[tool.coverage.run]
+parallel = true
+branch = true
+source = ["attr", "attrs"]
+
+[tool.coverage.paths]
+source = ["src", ".tox/*/site-packages"]
+
+[tool.coverage.report]
+show_missing = true
+exclude_lines = [
+ "pragma: no cover",
+ # PyPy is unacceptably slow under coverage.
+ "if PYPY:",
+]
+
+
+[tool.black]
+line-length = 79
+extend-exclude = '''
+# Exclude pattern matching test till black gains Python 3.10 support
+.*test_pattern_matching.*
+'''
+
+
+[tool.interrogate]
+verbose = 2
+fail-under = 100
+whitelist-regex = ["test_.*"]
+
+
+[tool.check-wheel-contents]
+toplevel = ["attr", "attrs"]
+
+
+[tool.isort]
+profile = "attrs"
+
+
+[tool.towncrier]
+ package = "attr"
+ package_dir = "src"
+ filename = "CHANGELOG.rst"
+ template = "changelog.d/towncrier_template.rst"
+ issue_format = "`#{issue} <https://github.com/python-attrs/attrs/issues/{issue}>`_"
+ directory = "changelog.d"
+ title_format = "{version} ({project_date})"
+ underlines = ["-", "^"]
+
+ [[tool.towncrier.section]]
+ path = ""
+
+ [[tool.towncrier.type]]
+ directory = "breaking"
+ name = "Backward-incompatible Changes"
+ showcontent = true
+
+ [[tool.towncrier.type]]
+ directory = "deprecation"
+ name = "Deprecations"
+ showcontent = true
+
+ [[tool.towncrier.type]]
+ directory = "change"
+ name = "Changes"
+ showcontent = true
diff --git a/testing/web-platform/tests/tools/third_party/attrs/setup.py b/testing/web-platform/tests/tools/third_party/attrs/setup.py
new file mode 100644
index 0000000000..00e7b012ae
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/setup.py
@@ -0,0 +1,151 @@
+# SPDX-License-Identifier: MIT
+
+import codecs
+import os
+import platform
+import re
+import sys
+
+from setuptools import find_packages, setup
+
+
+###############################################################################
+
+NAME = "attrs"
+PACKAGES = find_packages(where="src")
+META_PATH = os.path.join("src", "attr", "__init__.py")
+KEYWORDS = ["class", "attribute", "boilerplate"]
+PROJECT_URLS = {
+ "Documentation": "https://www.attrs.org/",
+ "Changelog": "https://www.attrs.org/en/stable/changelog.html",
+ "Bug Tracker": "https://github.com/python-attrs/attrs/issues",
+ "Source Code": "https://github.com/python-attrs/attrs",
+ "Funding": "https://github.com/sponsors/hynek",
+ "Tidelift": "https://tidelift.com/subscription/pkg/pypi-attrs?"
+ "utm_source=pypi-attrs&utm_medium=pypi",
+ "Ko-fi": "https://ko-fi.com/the_hynek",
+}
+CLASSIFIERS = [
+ "Development Status :: 5 - Production/Stable",
+ "Intended Audience :: Developers",
+ "Natural Language :: English",
+ "License :: OSI Approved :: MIT License",
+ "Operating System :: OS Independent",
+ "Programming Language :: Python",
+ "Programming Language :: Python :: 2",
+ "Programming Language :: Python :: 2.7",
+ "Programming Language :: Python :: 3",
+ "Programming Language :: Python :: 3.5",
+ "Programming Language :: Python :: 3.6",
+ "Programming Language :: Python :: 3.7",
+ "Programming Language :: Python :: 3.8",
+ "Programming Language :: Python :: 3.9",
+ "Programming Language :: Python :: 3.10",
+ "Programming Language :: Python :: Implementation :: CPython",
+ "Programming Language :: Python :: Implementation :: PyPy",
+ "Topic :: Software Development :: Libraries :: Python Modules",
+]
+INSTALL_REQUIRES = []
+EXTRAS_REQUIRE = {
+ "docs": ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"],
+ "tests_no_zope": [
+ # For regression test to ensure cloudpickle compat doesn't break.
+ 'cloudpickle; python_implementation == "CPython"',
+ # 5.0 introduced toml; parallel was broken until 5.0.2
+ "coverage[toml]>=5.0.2",
+ "hypothesis",
+ "pympler",
+ "pytest>=4.3.0", # 4.3.0 dropped last use of `convert`
+ "six",
+ ],
+}
+if (
+ sys.version_info[:2] >= (3, 6)
+ and platform.python_implementation() != "PyPy"
+):
+ EXTRAS_REQUIRE["tests_no_zope"].extend(["mypy", "pytest-mypy-plugins"])
+
+EXTRAS_REQUIRE["tests"] = EXTRAS_REQUIRE["tests_no_zope"] + ["zope.interface"]
+EXTRAS_REQUIRE["dev"] = (
+ EXTRAS_REQUIRE["tests"] + EXTRAS_REQUIRE["docs"] + ["pre-commit"]
+)
+
+###############################################################################
+
+HERE = os.path.abspath(os.path.dirname(__file__))
+
+
+def read(*parts):
+ """
+ Build an absolute path from *parts* and return the contents of the
+ resulting file. Assume UTF-8 encoding.
+ """
+ with codecs.open(os.path.join(HERE, *parts), "rb", "utf-8") as f:
+ return f.read()
+
+
+META_FILE = read(META_PATH)
+
+
+def find_meta(meta):
+ """
+ Extract __*meta*__ from META_FILE.
+ """
+ meta_match = re.search(
+ r"^__{meta}__ = ['\"]([^'\"]*)['\"]".format(meta=meta), META_FILE, re.M
+ )
+ if meta_match:
+ return meta_match.group(1)
+ raise RuntimeError("Unable to find __{meta}__ string.".format(meta=meta))
+
+
+LOGO = """
+.. image:: https://www.attrs.org/en/stable/_static/attrs_logo.png
+ :alt: attrs logo
+ :align: center
+""" # noqa
+
+VERSION = find_meta("version")
+URL = find_meta("url")
+LONG = (
+ LOGO
+ + read("README.rst").split(".. teaser-begin")[1]
+ + "\n\n"
+ + "Release Information\n"
+ + "===================\n\n"
+ + re.search(
+ r"(\d+.\d.\d \(.*?\)\r?\n.*?)\r?\n\r?\n\r?\n----\r?\n\r?\n\r?\n",
+ read("CHANGELOG.rst"),
+ re.S,
+ ).group(1)
+ + "\n\n`Full changelog "
+ + "<{url}en/stable/changelog.html>`_.\n\n".format(url=URL)
+ + read("AUTHORS.rst")
+)
+
+
+if __name__ == "__main__":
+ setup(
+ name=NAME,
+ description=find_meta("description"),
+ license=find_meta("license"),
+ url=URL,
+ project_urls=PROJECT_URLS,
+ version=VERSION,
+ author=find_meta("author"),
+ author_email=find_meta("email"),
+ maintainer=find_meta("author"),
+ maintainer_email=find_meta("email"),
+ keywords=KEYWORDS,
+ long_description=LONG,
+ long_description_content_type="text/x-rst",
+ packages=PACKAGES,
+ package_dir={"": "src"},
+ python_requires=">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*",
+ zip_safe=False,
+ classifiers=CLASSIFIERS,
+ install_requires=INSTALL_REQUIRES,
+ extras_require=EXTRAS_REQUIRE,
+ include_package_data=True,
+ options={"bdist_wheel": {"universal": "1"}},
+ )
diff --git a/testing/web-platform/tests/tools/third_party/attrs/src/attr/__init__.py b/testing/web-platform/tests/tools/third_party/attrs/src/attr/__init__.py
new file mode 100644
index 0000000000..f95c96dd57
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/src/attr/__init__.py
@@ -0,0 +1,80 @@
+# SPDX-License-Identifier: MIT
+
+from __future__ import absolute_import, division, print_function
+
+import sys
+
+from functools import partial
+
+from . import converters, exceptions, filters, setters, validators
+from ._cmp import cmp_using
+from ._config import get_run_validators, set_run_validators
+from ._funcs import asdict, assoc, astuple, evolve, has, resolve_types
+from ._make import (
+ NOTHING,
+ Attribute,
+ Factory,
+ attrib,
+ attrs,
+ fields,
+ fields_dict,
+ make_class,
+ validate,
+)
+from ._version_info import VersionInfo
+
+
+__version__ = "21.4.0"
+__version_info__ = VersionInfo._from_version_string(__version__)
+
+__title__ = "attrs"
+__description__ = "Classes Without Boilerplate"
+__url__ = "https://www.attrs.org/"
+__uri__ = __url__
+__doc__ = __description__ + " <" + __uri__ + ">"
+
+__author__ = "Hynek Schlawack"
+__email__ = "hs@ox.cx"
+
+__license__ = "MIT"
+__copyright__ = "Copyright (c) 2015 Hynek Schlawack"
+
+
+s = attributes = attrs
+ib = attr = attrib
+dataclass = partial(attrs, auto_attribs=True) # happy Easter ;)
+
+__all__ = [
+ "Attribute",
+ "Factory",
+ "NOTHING",
+ "asdict",
+ "assoc",
+ "astuple",
+ "attr",
+ "attrib",
+ "attributes",
+ "attrs",
+ "cmp_using",
+ "converters",
+ "evolve",
+ "exceptions",
+ "fields",
+ "fields_dict",
+ "filters",
+ "get_run_validators",
+ "has",
+ "ib",
+ "make_class",
+ "resolve_types",
+ "s",
+ "set_run_validators",
+ "setters",
+ "validate",
+ "validators",
+]
+
+if sys.version_info[:2] >= (3, 6):
+ from ._next_gen import define, field, frozen, mutable # noqa: F401
+
+ __all__.extend(("define", "field", "frozen", "mutable"))
diff --git a/testing/web-platform/tests/tools/third_party/attrs/src/attr/__init__.pyi b/testing/web-platform/tests/tools/third_party/attrs/src/attr/__init__.pyi
new file mode 100644
index 0000000000..c0a2126503
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/src/attr/__init__.pyi
@@ -0,0 +1,484 @@
+import sys
+
+from typing import (
+ Any,
+ Callable,
+ Dict,
+ Generic,
+ List,
+ Mapping,
+ Optional,
+ Sequence,
+ Tuple,
+ Type,
+ TypeVar,
+ Union,
+ overload,
+)
+
+# `import X as X` is required to make these public
+from . import converters as converters
+from . import exceptions as exceptions
+from . import filters as filters
+from . import setters as setters
+from . import validators as validators
+from ._version_info import VersionInfo
+
+__version__: str
+__version_info__: VersionInfo
+__title__: str
+__description__: str
+__url__: str
+__uri__: str
+__author__: str
+__email__: str
+__license__: str
+__copyright__: str
+
+_T = TypeVar("_T")
+_C = TypeVar("_C", bound=type)
+
+_EqOrderType = Union[bool, Callable[[Any], Any]]
+_ValidatorType = Callable[[Any, Attribute[_T], _T], Any]
+_ConverterType = Callable[[Any], Any]
+_FilterType = Callable[[Attribute[_T], _T], bool]
+_ReprType = Callable[[Any], str]
+_ReprArgType = Union[bool, _ReprType]
+_OnSetAttrType = Callable[[Any, Attribute[Any], Any], Any]
+_OnSetAttrArgType = Union[
+ _OnSetAttrType, List[_OnSetAttrType], setters._NoOpType
+]
+_FieldTransformer = Callable[
+ [type, List[Attribute[Any]]], List[Attribute[Any]]
+]
+_CompareWithType = Callable[[Any, Any], bool]
+# FIXME: in reality, if multiple validators are passed they must be in a list
+# or tuple, but those are invariant and so would prevent subtypes of
+# _ValidatorType from working when passed in a list or tuple.
+_ValidatorArgType = Union[_ValidatorType[_T], Sequence[_ValidatorType[_T]]]
+
+# _make --
+
+NOTHING: object
+
+# NOTE: Factory lies about its return type to make this possible:
+# `x: List[int] # = Factory(list)`
+# Work around mypy issue #4554 in the common case by using an overload.
+if sys.version_info >= (3, 8):
+ from typing import Literal
+ @overload
+ def Factory(factory: Callable[[], _T]) -> _T: ...
+ @overload
+ def Factory(
+ factory: Callable[[Any], _T],
+ takes_self: Literal[True],
+ ) -> _T: ...
+ @overload
+ def Factory(
+ factory: Callable[[], _T],
+ takes_self: Literal[False],
+ ) -> _T: ...
+
+else:
+ @overload
+ def Factory(factory: Callable[[], _T]) -> _T: ...
+ @overload
+ def Factory(
+ factory: Union[Callable[[Any], _T], Callable[[], _T]],
+ takes_self: bool = ...,
+ ) -> _T: ...
+
+# Static type inference support via __dataclass_transform__ implemented as per:
+# https://github.com/microsoft/pyright/blob/1.1.135/specs/dataclass_transforms.md
+# This annotation must be applied to all overloads of "define" and "attrs"
+#
+# NOTE: This is a typing construct and does not exist at runtime. Extensions
+# wrapping attrs decorators should declare a separate __dataclass_transform__
+# signature in the extension module using the specification linked above to
+# provide pyright support.
+def __dataclass_transform__(
+ *,
+ eq_default: bool = True,
+ order_default: bool = False,
+ kw_only_default: bool = False,
+ field_descriptors: Tuple[Union[type, Callable[..., Any]], ...] = (()),
+) -> Callable[[_T], _T]: ...
+
+class Attribute(Generic[_T]):
+ name: str
+ default: Optional[_T]
+ validator: Optional[_ValidatorType[_T]]
+ repr: _ReprArgType
+ cmp: _EqOrderType
+ eq: _EqOrderType
+ order: _EqOrderType
+ hash: Optional[bool]
+ init: bool
+ converter: Optional[_ConverterType]
+ metadata: Dict[Any, Any]
+ type: Optional[Type[_T]]
+ kw_only: bool
+ on_setattr: _OnSetAttrType
+ def evolve(self, **changes: Any) -> "Attribute[Any]": ...
+
+# NOTE: We had several choices for the annotation to use for type arg:
+# 1) Type[_T]
+# - Pros: Handles simple cases correctly
+# - Cons: Might produce less informative errors in the case of conflicting
+# TypeVars e.g. `attr.ib(default='bad', type=int)`
+# 2) Callable[..., _T]
+# - Pros: Better error messages than #1 for conflicting TypeVars
+# - Cons: Terrible error messages for validator checks.
+# e.g. attr.ib(type=int, validator=validate_str)
+# -> error: Cannot infer function type argument
+# 3) type (and do all of the work in the mypy plugin)
+# - Pros: Simple here, and we could customize the plugin with our own errors.
+# - Cons: Would need to write mypy plugin code to handle all the cases.
+# We chose option #1.
+
+# `attr` lies about its return type to make the following possible:
+# attr() -> Any
+# attr(8) -> int
+# attr(validator=<some callable>) -> Whatever the callable expects.
+# This makes this type of assignments possible:
+# x: int = attr(8)
+#
+# This form catches explicit None or no default but with no other arguments
+# returns Any.
+@overload
+def attrib(
+ default: None = ...,
+ validator: None = ...,
+ repr: _ReprArgType = ...,
+ cmp: Optional[_EqOrderType] = ...,
+ hash: Optional[bool] = ...,
+ init: bool = ...,
+ metadata: Optional[Mapping[Any, Any]] = ...,
+ type: None = ...,
+ converter: None = ...,
+ factory: None = ...,
+ kw_only: bool = ...,
+ eq: Optional[_EqOrderType] = ...,
+ order: Optional[_EqOrderType] = ...,
+ on_setattr: Optional[_OnSetAttrArgType] = ...,
+) -> Any: ...
+
+# This form catches an explicit None or no default and infers the type from the
+# other arguments.
+@overload
+def attrib(
+ default: None = ...,
+ validator: Optional[_ValidatorArgType[_T]] = ...,
+ repr: _ReprArgType = ...,
+ cmp: Optional[_EqOrderType] = ...,
+ hash: Optional[bool] = ...,
+ init: bool = ...,
+ metadata: Optional[Mapping[Any, Any]] = ...,
+ type: Optional[Type[_T]] = ...,
+ converter: Optional[_ConverterType] = ...,
+ factory: Optional[Callable[[], _T]] = ...,
+ kw_only: bool = ...,
+ eq: Optional[_EqOrderType] = ...,
+ order: Optional[_EqOrderType] = ...,
+ on_setattr: Optional[_OnSetAttrArgType] = ...,
+) -> _T: ...
+
+# This form catches an explicit default argument.
+@overload
+def attrib(
+ default: _T,
+ validator: Optional[_ValidatorArgType[_T]] = ...,
+ repr: _ReprArgType = ...,
+ cmp: Optional[_EqOrderType] = ...,
+ hash: Optional[bool] = ...,
+ init: bool = ...,
+ metadata: Optional[Mapping[Any, Any]] = ...,
+ type: Optional[Type[_T]] = ...,
+ converter: Optional[_ConverterType] = ...,
+ factory: Optional[Callable[[], _T]] = ...,
+ kw_only: bool = ...,
+ eq: Optional[_EqOrderType] = ...,
+ order: Optional[_EqOrderType] = ...,
+ on_setattr: Optional[_OnSetAttrArgType] = ...,
+) -> _T: ...
+
+# This form covers type=non-Type: e.g. forward references (str), Any
+@overload
+def attrib(
+ default: Optional[_T] = ...,
+ validator: Optional[_ValidatorArgType[_T]] = ...,
+ repr: _ReprArgType = ...,
+ cmp: Optional[_EqOrderType] = ...,
+ hash: Optional[bool] = ...,
+ init: bool = ...,
+ metadata: Optional[Mapping[Any, Any]] = ...,
+ type: object = ...,
+ converter: Optional[_ConverterType] = ...,
+ factory: Optional[Callable[[], _T]] = ...,
+ kw_only: bool = ...,
+ eq: Optional[_EqOrderType] = ...,
+ order: Optional[_EqOrderType] = ...,
+ on_setattr: Optional[_OnSetAttrArgType] = ...,
+) -> Any: ...
+@overload
+def field(
+ *,
+ default: None = ...,
+ validator: None = ...,
+ repr: _ReprArgType = ...,
+ hash: Optional[bool] = ...,
+ init: bool = ...,
+ metadata: Optional[Mapping[Any, Any]] = ...,
+ converter: None = ...,
+ factory: None = ...,
+ kw_only: bool = ...,
+ eq: Optional[bool] = ...,
+ order: Optional[bool] = ...,
+ on_setattr: Optional[_OnSetAttrArgType] = ...,
+) -> Any: ...
+
+# This form catches an explicit None or no default and infers the type from the
+# other arguments.
+@overload
+def field(
+ *,
+ default: None = ...,
+ validator: Optional[_ValidatorArgType[_T]] = ...,
+ repr: _ReprArgType = ...,
+ hash: Optional[bool] = ...,
+ init: bool = ...,
+ metadata: Optional[Mapping[Any, Any]] = ...,
+ converter: Optional[_ConverterType] = ...,
+ factory: Optional[Callable[[], _T]] = ...,
+ kw_only: bool = ...,
+ eq: Optional[_EqOrderType] = ...,
+ order: Optional[_EqOrderType] = ...,
+ on_setattr: Optional[_OnSetAttrArgType] = ...,
+) -> _T: ...
+
+# This form catches an explicit default argument.
+@overload
+def field(
+ *,
+ default: _T,
+ validator: Optional[_ValidatorArgType[_T]] = ...,
+ repr: _ReprArgType = ...,
+ hash: Optional[bool] = ...,
+ init: bool = ...,
+ metadata: Optional[Mapping[Any, Any]] = ...,
+ converter: Optional[_ConverterType] = ...,
+ factory: Optional[Callable[[], _T]] = ...,
+ kw_only: bool = ...,
+ eq: Optional[_EqOrderType] = ...,
+ order: Optional[_EqOrderType] = ...,
+ on_setattr: Optional[_OnSetAttrArgType] = ...,
+) -> _T: ...
+
+# This form covers type=non-Type: e.g. forward references (str), Any
+@overload
+def field(
+ *,
+ default: Optional[_T] = ...,
+ validator: Optional[_ValidatorArgType[_T]] = ...,
+ repr: _ReprArgType = ...,
+ hash: Optional[bool] = ...,
+ init: bool = ...,
+ metadata: Optional[Mapping[Any, Any]] = ...,
+ converter: Optional[_ConverterType] = ...,
+ factory: Optional[Callable[[], _T]] = ...,
+ kw_only: bool = ...,
+ eq: Optional[_EqOrderType] = ...,
+ order: Optional[_EqOrderType] = ...,
+ on_setattr: Optional[_OnSetAttrArgType] = ...,
+) -> Any: ...
+@overload
+@__dataclass_transform__(order_default=True, field_descriptors=(attrib, field))
+def attrs(
+ maybe_cls: _C,
+ these: Optional[Dict[str, Any]] = ...,
+ repr_ns: Optional[str] = ...,
+ repr: bool = ...,
+ cmp: Optional[_EqOrderType] = ...,
+ hash: Optional[bool] = ...,
+ init: bool = ...,
+ slots: bool = ...,
+ frozen: bool = ...,
+ weakref_slot: bool = ...,
+ str: bool = ...,
+ auto_attribs: bool = ...,
+ kw_only: bool = ...,
+ cache_hash: bool = ...,
+ auto_exc: bool = ...,
+ eq: Optional[_EqOrderType] = ...,
+ order: Optional[_EqOrderType] = ...,
+ auto_detect: bool = ...,
+ collect_by_mro: bool = ...,
+ getstate_setstate: Optional[bool] = ...,
+ on_setattr: Optional[_OnSetAttrArgType] = ...,
+ field_transformer: Optional[_FieldTransformer] = ...,
+ match_args: bool = ...,
+) -> _C: ...
+@overload
+@__dataclass_transform__(order_default=True, field_descriptors=(attrib, field))
+def attrs(
+ maybe_cls: None = ...,
+ these: Optional[Dict[str, Any]] = ...,
+ repr_ns: Optional[str] = ...,
+ repr: bool = ...,
+ cmp: Optional[_EqOrderType] = ...,
+ hash: Optional[bool] = ...,
+ init: bool = ...,
+ slots: bool = ...,
+ frozen: bool = ...,
+ weakref_slot: bool = ...,
+ str: bool = ...,
+ auto_attribs: bool = ...,
+ kw_only: bool = ...,
+ cache_hash: bool = ...,
+ auto_exc: bool = ...,
+ eq: Optional[_EqOrderType] = ...,
+ order: Optional[_EqOrderType] = ...,
+ auto_detect: bool = ...,
+ collect_by_mro: bool = ...,
+ getstate_setstate: Optional[bool] = ...,
+ on_setattr: Optional[_OnSetAttrArgType] = ...,
+ field_transformer: Optional[_FieldTransformer] = ...,
+ match_args: bool = ...,
+) -> Callable[[_C], _C]: ...
+@overload
+@__dataclass_transform__(field_descriptors=(attrib, field))
+def define(
+ maybe_cls: _C,
+ *,
+ these: Optional[Dict[str, Any]] = ...,
+ repr: bool = ...,
+ hash: Optional[bool] = ...,
+ init: bool = ...,
+ slots: bool = ...,
+ frozen: bool = ...,
+ weakref_slot: bool = ...,
+ str: bool = ...,
+ auto_attribs: bool = ...,
+ kw_only: bool = ...,
+ cache_hash: bool = ...,
+ auto_exc: bool = ...,
+ eq: Optional[bool] = ...,
+ order: Optional[bool] = ...,
+ auto_detect: bool = ...,
+ getstate_setstate: Optional[bool] = ...,
+ on_setattr: Optional[_OnSetAttrArgType] = ...,
+ field_transformer: Optional[_FieldTransformer] = ...,
+ match_args: bool = ...,
+) -> _C: ...
+@overload
+@__dataclass_transform__(field_descriptors=(attrib, field))
+def define(
+ maybe_cls: None = ...,
+ *,
+ these: Optional[Dict[str, Any]] = ...,
+ repr: bool = ...,
+ hash: Optional[bool] = ...,
+ init: bool = ...,
+ slots: bool = ...,
+ frozen: bool = ...,
+ weakref_slot: bool = ...,
+ str: bool = ...,
+ auto_attribs: bool = ...,
+ kw_only: bool = ...,
+ cache_hash: bool = ...,
+ auto_exc: bool = ...,
+ eq: Optional[bool] = ...,
+ order: Optional[bool] = ...,
+ auto_detect: bool = ...,
+ getstate_setstate: Optional[bool] = ...,
+ on_setattr: Optional[_OnSetAttrArgType] = ...,
+ field_transformer: Optional[_FieldTransformer] = ...,
+ match_args: bool = ...,
+) -> Callable[[_C], _C]: ...
+
+mutable = define
+frozen = define # they differ only in their defaults
+
+# TODO: add support for returning NamedTuple from the mypy plugin
+class _Fields(Tuple[Attribute[Any], ...]):
+ def __getattr__(self, name: str) -> Attribute[Any]: ...
+
+def fields(cls: type) -> _Fields: ...
+def fields_dict(cls: type) -> Dict[str, Attribute[Any]]: ...
+def validate(inst: Any) -> None: ...
+def resolve_types(
+ cls: _C,
+ globalns: Optional[Dict[str, Any]] = ...,
+ localns: Optional[Dict[str, Any]] = ...,
+ attribs: Optional[List[Attribute[Any]]] = ...,
+) -> _C: ...
+
+# TODO: add support for returning a proper attrs class from the mypy plugin
+# we use Any instead of _CountingAttr so that e.g. `make_class('Foo',
+# [attr.ib()])` is valid
+def make_class(
+ name: str,
+ attrs: Union[List[str], Tuple[str, ...], Dict[str, Any]],
+ bases: Tuple[type, ...] = ...,
+ repr_ns: Optional[str] = ...,
+ repr: bool = ...,
+ cmp: Optional[_EqOrderType] = ...,
+ hash: Optional[bool] = ...,
+ init: bool = ...,
+ slots: bool = ...,
+ frozen: bool = ...,
+ weakref_slot: bool = ...,
+ str: bool = ...,
+ auto_attribs: bool = ...,
+ kw_only: bool = ...,
+ cache_hash: bool = ...,
+ auto_exc: bool = ...,
+ eq: Optional[_EqOrderType] = ...,
+ order: Optional[_EqOrderType] = ...,
+ collect_by_mro: bool = ...,
+ on_setattr: Optional[_OnSetAttrArgType] = ...,
+ field_transformer: Optional[_FieldTransformer] = ...,
+) -> type: ...
+
+# _funcs --
+
+# TODO: add support for returning TypedDict from the mypy plugin
+# FIXME: asdict/astuple do not honor their factory args. Waiting on one of
+# these:
+# https://github.com/python/mypy/issues/4236
+# https://github.com/python/typing/issues/253
+# XXX: remember to fix attrs.asdict/astuple too!
+def asdict(
+ inst: Any,
+ recurse: bool = ...,
+ filter: Optional[_FilterType[Any]] = ...,
+ dict_factory: Type[Mapping[Any, Any]] = ...,
+ retain_collection_types: bool = ...,
+ value_serializer: Optional[
+ Callable[[type, Attribute[Any], Any], Any]
+ ] = ...,
+ tuple_keys: Optional[bool] = ...,
+) -> Dict[str, Any]: ...
+
+# TODO: add support for returning NamedTuple from the mypy plugin
+def astuple(
+ inst: Any,
+ recurse: bool = ...,
+ filter: Optional[_FilterType[Any]] = ...,
+ tuple_factory: Type[Sequence[Any]] = ...,
+ retain_collection_types: bool = ...,
+) -> Tuple[Any, ...]: ...
+def has(cls: type) -> bool: ...
+def assoc(inst: _T, **changes: Any) -> _T: ...
+def evolve(inst: _T, **changes: Any) -> _T: ...
+
+# _config --
+
+def set_run_validators(run: bool) -> None: ...
+def get_run_validators() -> bool: ...
+
+# aliases --
+
+s = attributes = attrs
+ib = attr = attrib
+dataclass = attrs # Technically, partial(attrs, auto_attribs=True) ;)
diff --git a/testing/web-platform/tests/tools/third_party/attrs/src/attr/_cmp.py b/testing/web-platform/tests/tools/third_party/attrs/src/attr/_cmp.py
new file mode 100644
index 0000000000..6cffa4dbab
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/src/attr/_cmp.py
@@ -0,0 +1,154 @@
+# SPDX-License-Identifier: MIT
+
+from __future__ import absolute_import, division, print_function
+
+import functools
+
+from ._compat import new_class
+from ._make import _make_ne
+
+
+_operation_names = {"eq": "==", "lt": "<", "le": "<=", "gt": ">", "ge": ">="}
+
+
+def cmp_using(
+ eq=None,
+ lt=None,
+ le=None,
+ gt=None,
+ ge=None,
+ require_same_type=True,
+ class_name="Comparable",
+):
+ """
+ Create a class that can be passed into `attr.ib`'s ``eq``, ``order``, and
+ ``cmp`` arguments to customize field comparison.
+
+ The resulting class will have a full set of ordering methods if
+ at least one of ``{lt, le, gt, ge}`` and ``eq`` are provided.
+
+ :param Optional[callable] eq: `callable` used to evaluate equality
+ of two objects.
+ :param Optional[callable] lt: `callable` used to evaluate whether
+ one object is less than another object.
+ :param Optional[callable] le: `callable` used to evaluate whether
+ one object is less than or equal to another object.
+ :param Optional[callable] gt: `callable` used to evaluate whether
+ one object is greater than another object.
+ :param Optional[callable] ge: `callable` used to evaluate whether
+ one object is greater than or equal to another object.
+
+ :param bool require_same_type: When `True`, equality and ordering methods
+ will return `NotImplemented` if objects are not of the same type.
+
+ :param Optional[str] class_name: Name of class. Defaults to 'Comparable'.
+
+ See `comparison` for more details.
+
+ .. versionadded:: 21.1.0
+ """
+
+ body = {
+ "__slots__": ["value"],
+ "__init__": _make_init(),
+ "_requirements": [],
+ "_is_comparable_to": _is_comparable_to,
+ }
+
+ # Add operations.
+ num_order_functions = 0
+ has_eq_function = False
+
+ if eq is not None:
+ has_eq_function = True
+ body["__eq__"] = _make_operator("eq", eq)
+ body["__ne__"] = _make_ne()
+
+ if lt is not None:
+ num_order_functions += 1
+ body["__lt__"] = _make_operator("lt", lt)
+
+ if le is not None:
+ num_order_functions += 1
+ body["__le__"] = _make_operator("le", le)
+
+ if gt is not None:
+ num_order_functions += 1
+ body["__gt__"] = _make_operator("gt", gt)
+
+ if ge is not None:
+ num_order_functions += 1
+ body["__ge__"] = _make_operator("ge", ge)
+
+ type_ = new_class(class_name, (object,), {}, lambda ns: ns.update(body))
+
+ # Add same type requirement.
+ if require_same_type:
+ type_._requirements.append(_check_same_type)
+
+ # Add total ordering if at least one operation was defined.
+ if 0 < num_order_functions < 4:
+ if not has_eq_function:
+ # functools.total_ordering requires __eq__ to be defined,
+ # so raise early error here to keep a nice stack.
+ raise ValueError(
+ "eq must be define is order to complete ordering from "
+ "lt, le, gt, ge."
+ )
+ type_ = functools.total_ordering(type_)
+
+ return type_
+
+
+def _make_init():
+ """
+ Create __init__ method.
+ """
+
+ def __init__(self, value):
+ """
+ Initialize object with *value*.
+ """
+ self.value = value
+
+ return __init__
+
+
+def _make_operator(name, func):
+ """
+ Create operator method.
+ """
+
+ def method(self, other):
+ if not self._is_comparable_to(other):
+ return NotImplemented
+
+ result = func(self.value, other.value)
+ if result is NotImplemented:
+ return NotImplemented
+
+ return result
+
+ method.__name__ = "__%s__" % (name,)
+ method.__doc__ = "Return a %s b. Computed by attrs." % (
+ _operation_names[name],
+ )
+
+ return method
+
+
+def _is_comparable_to(self, other):
+ """
+ Check whether `other` is comparable to `self`.
+ """
+ for func in self._requirements:
+ if not func(self, other):
+ return False
+ return True
+
+
+def _check_same_type(self, other):
+ """
+ Return True if *self* and *other* are of the same type, False otherwise.
+ """
+ return other.value.__class__ is self.value.__class__
diff --git a/testing/web-platform/tests/tools/third_party/attrs/src/attr/_cmp.pyi b/testing/web-platform/tests/tools/third_party/attrs/src/attr/_cmp.pyi
new file mode 100644
index 0000000000..e71aaff7a1
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/src/attr/_cmp.pyi
@@ -0,0 +1,13 @@
+from typing import Optional, Type
+
+from . import _CompareWithType
+
+def cmp_using(
+ eq: Optional[_CompareWithType],
+ lt: Optional[_CompareWithType],
+ le: Optional[_CompareWithType],
+ gt: Optional[_CompareWithType],
+ ge: Optional[_CompareWithType],
+ require_same_type: bool,
+ class_name: str,
+) -> Type: ...
diff --git a/testing/web-platform/tests/tools/third_party/attrs/src/attr/_compat.py b/testing/web-platform/tests/tools/third_party/attrs/src/attr/_compat.py
new file mode 100644
index 0000000000..dc0cb02b64
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/src/attr/_compat.py
@@ -0,0 +1,261 @@
+# SPDX-License-Identifier: MIT
+
+from __future__ import absolute_import, division, print_function
+
+import platform
+import sys
+import threading
+import types
+import warnings
+
+
+PY2 = sys.version_info[0] == 2
+PYPY = platform.python_implementation() == "PyPy"
+PY36 = sys.version_info[:2] >= (3, 6)
+HAS_F_STRINGS = PY36
+PY310 = sys.version_info[:2] >= (3, 10)
+
+
+if PYPY or PY36:
+ ordered_dict = dict
+else:
+ from collections import OrderedDict
+
+ ordered_dict = OrderedDict
+
+
+if PY2:
+ from collections import Mapping, Sequence
+
+ from UserDict import IterableUserDict
+
+ # We 'bundle' isclass instead of using inspect as importing inspect is
+ # fairly expensive (order of 10-15 ms for a modern machine in 2016)
+ def isclass(klass):
+ return isinstance(klass, (type, types.ClassType))
+
+ def new_class(name, bases, kwds, exec_body):
+ """
+ A minimal stub of types.new_class that we need for make_class.
+ """
+ ns = {}
+ exec_body(ns)
+
+ return type(name, bases, ns)
+
+ # TYPE is used in exceptions, repr(int) is different on Python 2 and 3.
+ TYPE = "type"
+
+ def iteritems(d):
+ return d.iteritems()
+
+ # Python 2 is bereft of a read-only dict proxy, so we make one!
+ class ReadOnlyDict(IterableUserDict):
+ """
+ Best-effort read-only dict wrapper.
+ """
+
+ def __setitem__(self, key, val):
+ # We gently pretend we're a Python 3 mappingproxy.
+ raise TypeError(
+ "'mappingproxy' object does not support item assignment"
+ )
+
+ def update(self, _):
+ # We gently pretend we're a Python 3 mappingproxy.
+ raise AttributeError(
+ "'mappingproxy' object has no attribute 'update'"
+ )
+
+ def __delitem__(self, _):
+ # We gently pretend we're a Python 3 mappingproxy.
+ raise TypeError(
+ "'mappingproxy' object does not support item deletion"
+ )
+
+ def clear(self):
+ # We gently pretend we're a Python 3 mappingproxy.
+ raise AttributeError(
+ "'mappingproxy' object has no attribute 'clear'"
+ )
+
+ def pop(self, key, default=None):
+ # We gently pretend we're a Python 3 mappingproxy.
+ raise AttributeError(
+ "'mappingproxy' object has no attribute 'pop'"
+ )
+
+ def popitem(self):
+ # We gently pretend we're a Python 3 mappingproxy.
+ raise AttributeError(
+ "'mappingproxy' object has no attribute 'popitem'"
+ )
+
+ def setdefault(self, key, default=None):
+ # We gently pretend we're a Python 3 mappingproxy.
+ raise AttributeError(
+ "'mappingproxy' object has no attribute 'setdefault'"
+ )
+
+ def __repr__(self):
+ # Override to be identical to the Python 3 version.
+ return "mappingproxy(" + repr(self.data) + ")"
+
+ def metadata_proxy(d):
+ res = ReadOnlyDict()
+ res.data.update(d) # We blocked update, so we have to do it like this.
+ return res
+
+ def just_warn(*args, **kw): # pragma: no cover
+ """
+ We only warn on Python 3 because we are not aware of any concrete
+ consequences of not setting the cell on Python 2.
+ """
+
+else: # Python 3 and later.
+ from collections.abc import Mapping, Sequence # noqa
+
+ def just_warn(*args, **kw):
+ """
+ We only warn on Python 3 because we are not aware of any concrete
+ consequences of not setting the cell on Python 2.
+ """
+ warnings.warn(
+ "Running interpreter doesn't sufficiently support code object "
+ "introspection. Some features like bare super() or accessing "
+ "__class__ will not work with slotted classes.",
+ RuntimeWarning,
+ stacklevel=2,
+ )
+
+ def isclass(klass):
+ return isinstance(klass, type)
+
+ TYPE = "class"
+
+ def iteritems(d):
+ return d.items()
+
+ new_class = types.new_class
+
+ def metadata_proxy(d):
+ return types.MappingProxyType(dict(d))
+
+
+def make_set_closure_cell():
+ """Return a function of two arguments (cell, value) which sets
+ the value stored in the closure cell `cell` to `value`.
+ """
+ # pypy makes this easy. (It also supports the logic below, but
+ # why not do the easy/fast thing?)
+ if PYPY:
+
+ def set_closure_cell(cell, value):
+ cell.__setstate__((value,))
+
+ return set_closure_cell
+
+ # Otherwise gotta do it the hard way.
+
+ # Create a function that will set its first cellvar to `value`.
+ def set_first_cellvar_to(value):
+ x = value
+ return
+
+ # This function will be eliminated as dead code, but
+ # not before its reference to `x` forces `x` to be
+ # represented as a closure cell rather than a local.
+ def force_x_to_be_a_cell(): # pragma: no cover
+ return x
+
+ try:
+ # Extract the code object and make sure our assumptions about
+ # the closure behavior are correct.
+ if PY2:
+ co = set_first_cellvar_to.func_code
+ else:
+ co = set_first_cellvar_to.__code__
+ if co.co_cellvars != ("x",) or co.co_freevars != ():
+ raise AssertionError # pragma: no cover
+
+ # Convert this code object to a code object that sets the
+ # function's first _freevar_ (not cellvar) to the argument.
+ if sys.version_info >= (3, 8):
+ # CPython 3.8+ has an incompatible CodeType signature
+ # (added a posonlyargcount argument) but also added
+ # CodeType.replace() to do this without counting parameters.
+ set_first_freevar_code = co.replace(
+ co_cellvars=co.co_freevars, co_freevars=co.co_cellvars
+ )
+ else:
+ args = [co.co_argcount]
+ if not PY2:
+ args.append(co.co_kwonlyargcount)
+ args.extend(
+ [
+ co.co_nlocals,
+ co.co_stacksize,
+ co.co_flags,
+ co.co_code,
+ co.co_consts,
+ co.co_names,
+ co.co_varnames,
+ co.co_filename,
+ co.co_name,
+ co.co_firstlineno,
+ co.co_lnotab,
+ # These two arguments are reversed:
+ co.co_cellvars,
+ co.co_freevars,
+ ]
+ )
+ set_first_freevar_code = types.CodeType(*args)
+
+ def set_closure_cell(cell, value):
+ # Create a function using the set_first_freevar_code,
+ # whose first closure cell is `cell`. Calling it will
+ # change the value of that cell.
+ setter = types.FunctionType(
+ set_first_freevar_code, {}, "setter", (), (cell,)
+ )
+ # And call it to set the cell.
+ setter(value)
+
+ # Make sure it works on this interpreter:
+ def make_func_with_cell():
+ x = None
+
+ def func():
+ return x # pragma: no cover
+
+ return func
+
+ if PY2:
+ cell = make_func_with_cell().func_closure[0]
+ else:
+ cell = make_func_with_cell().__closure__[0]
+ set_closure_cell(cell, 100)
+ if cell.cell_contents != 100:
+ raise AssertionError # pragma: no cover
+
+ except Exception:
+ return just_warn
+ else:
+ return set_closure_cell
+
+
+set_closure_cell = make_set_closure_cell()
+
+# Thread-local global to track attrs instances which are already being repr'd.
+# This is needed because there is no other (thread-safe) way to pass info
+# about the instances that are already being repr'd through the call stack
+# in order to ensure we don't perform infinite recursion.
+#
+# For instance, if an instance contains a dict which contains that instance,
+# we need to know that we're already repr'ing the outside instance from within
+# the dict's repr() call.
+#
+# This lives here rather than in _make.py so that the functions in _make.py
+# don't have a direct reference to the thread-local in their globals dict.
+# If they have such a reference, it breaks cloudpickle.
+repr_context = threading.local()
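A minimal sketch of what the closure-cell rewriting above enables (illustrative only, not part of the vendored sources; it assumes the package is importable as ``attr``): with ``slots=True`` the original class is replaced by a clone, and methods that reference ``__class__`` or use zero-argument ``super()`` keep working only because their closure cells are rewritten via ``set_closure_cell``.

import attr

@attr.s(slots=True)
class C(object):
    x = attr.ib()

    def my_class(self):
        # The compiler bakes __class__ into a closure cell; attrs rewrites
        # that cell so it points at the slotted replacement class.
        return __class__

c = C(1)
assert c.my_class() is type(c)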
diff --git a/testing/web-platform/tests/tools/third_party/attrs/src/attr/_config.py b/testing/web-platform/tests/tools/third_party/attrs/src/attr/_config.py
new file mode 100644
index 0000000000..fc9be29d00
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/src/attr/_config.py
@@ -0,0 +1,33 @@
+# SPDX-License-Identifier: MIT
+
+from __future__ import absolute_import, division, print_function
+
+
+__all__ = ["set_run_validators", "get_run_validators"]
+
+_run_validators = True
+
+
+def set_run_validators(run):
+ """
+ Set whether or not validators are run. By default, they are run.
+
+ .. deprecated:: 21.3.0 It will not be removed, but it also will not be
+ moved to new ``attrs`` namespace. Use `attrs.validators.set_disabled()`
+ instead.
+ """
+ if not isinstance(run, bool):
+ raise TypeError("'run' must be bool.")
+ global _run_validators
+ _run_validators = run
+
+
+def get_run_validators():
+ """
+ Return whether or not validators are run.
+
+ .. deprecated:: 21.3.0 It will not be removed, but it also will not be
+ moved to new ``attrs`` namespace. Use `attrs.validators.get_disabled()`
+ instead.
+ """
+ return _run_validators
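A minimal usage sketch for the two helpers above (illustrative only, not part of the vendored sources; it assumes the package is importable as ``attr``):

import attr

assert attr.get_run_validators() is True   # validators run by default
attr.set_run_validators(False)             # disable them process-wide
assert attr.get_run_validators() is False
attr.set_run_validators(True)              # turn them back on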
diff --git a/testing/web-platform/tests/tools/third_party/attrs/src/attr/_funcs.py b/testing/web-platform/tests/tools/third_party/attrs/src/attr/_funcs.py
new file mode 100644
index 0000000000..4c90085a40
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/src/attr/_funcs.py
@@ -0,0 +1,422 @@
+# SPDX-License-Identifier: MIT
+
+from __future__ import absolute_import, division, print_function
+
+import copy
+
+from ._compat import iteritems
+from ._make import NOTHING, _obj_setattr, fields
+from .exceptions import AttrsAttributeNotFoundError
+
+
+def asdict(
+ inst,
+ recurse=True,
+ filter=None,
+ dict_factory=dict,
+ retain_collection_types=False,
+ value_serializer=None,
+):
+ """
+ Return the ``attrs`` attribute values of *inst* as a dict.
+
+ Optionally recurse into other ``attrs``-decorated classes.
+
+ :param inst: Instance of an ``attrs``-decorated class.
+ :param bool recurse: Recurse into classes that are also
+ ``attrs``-decorated.
+ :param callable filter: A callable whose return code determines whether an
+ attribute or element is included (``True``) or dropped (``False``). Is
+ called with the `attrs.Attribute` as the first argument and the
+ value as the second argument.
+ :param callable dict_factory: A callable to produce dictionaries from. For
+ example, to produce ordered dictionaries instead of normal Python
+ dictionaries, pass in ``collections.OrderedDict``.
+ :param bool retain_collection_types: Do not convert to ``list`` when
+ encountering an attribute whose type is ``tuple`` or ``set``. Only
+ meaningful if ``recurse`` is ``True``.
+ :param Optional[callable] value_serializer: A hook that is called for every
+ attribute or dict key/value. It receives the current instance, field
+ and value and must return the (updated) value. The hook is run *after*
+ the optional *filter* has been applied.
+
+ :rtype: return type of *dict_factory*
+
+ :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
+ class.
+
+ .. versionadded:: 16.0.0 *dict_factory*
+ .. versionadded:: 16.1.0 *retain_collection_types*
+ .. versionadded:: 20.3.0 *value_serializer*
+ .. versionadded:: 21.3.0 If a dict has a collection for a key, it is
+ serialized as a tuple.
+ """
+ attrs = fields(inst.__class__)
+ rv = dict_factory()
+ for a in attrs:
+ v = getattr(inst, a.name)
+ if filter is not None and not filter(a, v):
+ continue
+
+ if value_serializer is not None:
+ v = value_serializer(inst, a, v)
+
+ if recurse is True:
+ if has(v.__class__):
+ rv[a.name] = asdict(
+ v,
+ recurse=True,
+ filter=filter,
+ dict_factory=dict_factory,
+ retain_collection_types=retain_collection_types,
+ value_serializer=value_serializer,
+ )
+ elif isinstance(v, (tuple, list, set, frozenset)):
+ cf = v.__class__ if retain_collection_types is True else list
+ rv[a.name] = cf(
+ [
+ _asdict_anything(
+ i,
+ is_key=False,
+ filter=filter,
+ dict_factory=dict_factory,
+ retain_collection_types=retain_collection_types,
+ value_serializer=value_serializer,
+ )
+ for i in v
+ ]
+ )
+ elif isinstance(v, dict):
+ df = dict_factory
+ rv[a.name] = df(
+ (
+ _asdict_anything(
+ kk,
+ is_key=True,
+ filter=filter,
+ dict_factory=df,
+ retain_collection_types=retain_collection_types,
+ value_serializer=value_serializer,
+ ),
+ _asdict_anything(
+ vv,
+ is_key=False,
+ filter=filter,
+ dict_factory=df,
+ retain_collection_types=retain_collection_types,
+ value_serializer=value_serializer,
+ ),
+ )
+ for kk, vv in iteritems(v)
+ )
+ else:
+ rv[a.name] = v
+ else:
+ rv[a.name] = v
+ return rv
+
+
+def _asdict_anything(
+ val,
+ is_key,
+ filter,
+ dict_factory,
+ retain_collection_types,
+ value_serializer,
+):
+ """
+    ``asdict`` only works on attrs instances; this works on anything.
+ """
+ if getattr(val.__class__, "__attrs_attrs__", None) is not None:
+ # Attrs class.
+ rv = asdict(
+ val,
+ recurse=True,
+ filter=filter,
+ dict_factory=dict_factory,
+ retain_collection_types=retain_collection_types,
+ value_serializer=value_serializer,
+ )
+ elif isinstance(val, (tuple, list, set, frozenset)):
+ if retain_collection_types is True:
+ cf = val.__class__
+ elif is_key:
+ cf = tuple
+ else:
+ cf = list
+
+ rv = cf(
+ [
+ _asdict_anything(
+ i,
+ is_key=False,
+ filter=filter,
+ dict_factory=dict_factory,
+ retain_collection_types=retain_collection_types,
+ value_serializer=value_serializer,
+ )
+ for i in val
+ ]
+ )
+ elif isinstance(val, dict):
+ df = dict_factory
+ rv = df(
+ (
+ _asdict_anything(
+ kk,
+ is_key=True,
+ filter=filter,
+ dict_factory=df,
+ retain_collection_types=retain_collection_types,
+ value_serializer=value_serializer,
+ ),
+ _asdict_anything(
+ vv,
+ is_key=False,
+ filter=filter,
+ dict_factory=df,
+ retain_collection_types=retain_collection_types,
+ value_serializer=value_serializer,
+ ),
+ )
+ for kk, vv in iteritems(val)
+ )
+ else:
+ rv = val
+ if value_serializer is not None:
+ rv = value_serializer(None, None, rv)
+
+ return rv
+
+
+def astuple(
+ inst,
+ recurse=True,
+ filter=None,
+ tuple_factory=tuple,
+ retain_collection_types=False,
+):
+ """
+ Return the ``attrs`` attribute values of *inst* as a tuple.
+
+ Optionally recurse into other ``attrs``-decorated classes.
+
+ :param inst: Instance of an ``attrs``-decorated class.
+ :param bool recurse: Recurse into classes that are also
+ ``attrs``-decorated.
+ :param callable filter: A callable whose return code determines whether an
+ attribute or element is included (``True``) or dropped (``False``). Is
+ called with the `attrs.Attribute` as the first argument and the
+ value as the second argument.
+ :param callable tuple_factory: A callable to produce tuples from. For
+        example, pass ``list`` to produce lists instead of tuples.
+ :param bool retain_collection_types: Do not convert to ``list``
+        or ``dict`` when encountering an attribute whose type is
+ ``tuple``, ``dict`` or ``set``. Only meaningful if ``recurse`` is
+ ``True``.
+
+ :rtype: return type of *tuple_factory*
+
+ :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
+ class.
+
+ .. versionadded:: 16.2.0
+ """
+ attrs = fields(inst.__class__)
+ rv = []
+ retain = retain_collection_types # Very long. :/
+ for a in attrs:
+ v = getattr(inst, a.name)
+ if filter is not None and not filter(a, v):
+ continue
+ if recurse is True:
+ if has(v.__class__):
+ rv.append(
+ astuple(
+ v,
+ recurse=True,
+ filter=filter,
+ tuple_factory=tuple_factory,
+ retain_collection_types=retain,
+ )
+ )
+ elif isinstance(v, (tuple, list, set, frozenset)):
+ cf = v.__class__ if retain is True else list
+ rv.append(
+ cf(
+ [
+ astuple(
+ j,
+ recurse=True,
+ filter=filter,
+ tuple_factory=tuple_factory,
+ retain_collection_types=retain,
+ )
+ if has(j.__class__)
+ else j
+ for j in v
+ ]
+ )
+ )
+ elif isinstance(v, dict):
+ df = v.__class__ if retain is True else dict
+ rv.append(
+ df(
+ (
+ astuple(
+ kk,
+ tuple_factory=tuple_factory,
+ retain_collection_types=retain,
+ )
+ if has(kk.__class__)
+ else kk,
+ astuple(
+ vv,
+ tuple_factory=tuple_factory,
+ retain_collection_types=retain,
+ )
+ if has(vv.__class__)
+ else vv,
+ )
+ for kk, vv in iteritems(v)
+ )
+ )
+ else:
+ rv.append(v)
+ else:
+ rv.append(v)
+
+ return rv if tuple_factory is list else tuple_factory(rv)
+
+
+def has(cls):
+ """
+ Check whether *cls* is a class with ``attrs`` attributes.
+
+ :param type cls: Class to introspect.
+ :raise TypeError: If *cls* is not a class.
+
+ :rtype: bool
+ """
+ return getattr(cls, "__attrs_attrs__", None) is not None
+
+
+def assoc(inst, **changes):
+ """
+ Copy *inst* and apply *changes*.
+
+ :param inst: Instance of a class with ``attrs`` attributes.
+ :param changes: Keyword changes in the new copy.
+
+ :return: A copy of inst with *changes* incorporated.
+
+ :raise attr.exceptions.AttrsAttributeNotFoundError: If *attr_name* couldn't
+ be found on *cls*.
+ :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
+ class.
+
+ .. deprecated:: 17.1.0
+ Use `attrs.evolve` instead if you can.
+        This function will not be removed due to the slightly different approach
+ compared to `attrs.evolve`.
+ """
+ import warnings
+
+ warnings.warn(
+ "assoc is deprecated and will be removed after 2018/01.",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ new = copy.copy(inst)
+ attrs = fields(inst.__class__)
+ for k, v in iteritems(changes):
+ a = getattr(attrs, k, NOTHING)
+ if a is NOTHING:
+ raise AttrsAttributeNotFoundError(
+ "{k} is not an attrs attribute on {cl}.".format(
+ k=k, cl=new.__class__
+ )
+ )
+ _obj_setattr(new, k, v)
+ return new
+
+
+def evolve(inst, **changes):
+ """
+ Create a new instance, based on *inst* with *changes* applied.
+
+ :param inst: Instance of a class with ``attrs`` attributes.
+ :param changes: Keyword changes in the new copy.
+
+ :return: A copy of inst with *changes* incorporated.
+
+ :raise TypeError: If *attr_name* couldn't be found in the class
+ ``__init__``.
+ :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
+ class.
+
+ .. versionadded:: 17.1.0
+ """
+ cls = inst.__class__
+ attrs = fields(cls)
+ for a in attrs:
+ if not a.init:
+ continue
+ attr_name = a.name # To deal with private attributes.
+ init_name = attr_name if attr_name[0] != "_" else attr_name[1:]
+ if init_name not in changes:
+ changes[init_name] = getattr(inst, attr_name)
+
+ return cls(**changes)
+
+
+def resolve_types(cls, globalns=None, localns=None, attribs=None):
+ """
+ Resolve any strings and forward annotations in type annotations.
+
+ This is only required if you need concrete types in `Attribute`'s *type*
+ field. In other words, you don't need to resolve your types if you only
+ use them for static type checking.
+
+ With no arguments, names will be looked up in the module in which the class
+ was created. If this is not what you want, e.g. if the name only exists
+ inside a method, you may pass *globalns* or *localns* to specify other
+ dictionaries in which to look up these names. See the docs of
+ `typing.get_type_hints` for more details.
+
+ :param type cls: Class to resolve.
+ :param Optional[dict] globalns: Dictionary containing global variables.
+ :param Optional[dict] localns: Dictionary containing local variables.
+ :param Optional[list] attribs: List of attribs for the given class.
+ This is necessary when calling from inside a ``field_transformer``
+ since *cls* is not an ``attrs`` class yet.
+
+ :raise TypeError: If *cls* is not a class.
+ :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
+ class and you didn't pass any attribs.
+ :raise NameError: If types cannot be resolved because of missing variables.
+
+ :returns: *cls* so you can use this function also as a class decorator.
+ Please note that you have to apply it **after** `attrs.define`. That
+ means the decorator has to come in the line **before** `attrs.define`.
+
+ .. versionadded:: 20.1.0
+ .. versionadded:: 21.1.0 *attribs*
+
+ """
+ # Since calling get_type_hints is expensive we cache whether we've
+ # done it already.
+ if getattr(cls, "__attrs_types_resolved__", None) != cls:
+ import typing
+
+ hints = typing.get_type_hints(cls, globalns=globalns, localns=localns)
+ for field in fields(cls) if attribs is None else attribs:
+ if field.name in hints:
+ # Since fields have been frozen we must work around it.
+ _obj_setattr(field, "type", hints[field.name])
+ # We store the class we resolved so that subclasses know they haven't
+ # been resolved.
+ cls.__attrs_types_resolved__ = cls
+
+ # Return the class so you can use it as a decorator too.
+ return cls
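A short usage sketch covering ``asdict``, ``astuple``, ``has``, ``evolve`` and ``resolve_types`` (illustrative only, not part of the vendored sources; ``Point`` is a hypothetical example class and the package is assumed to be importable as ``attr``):

import attr

@attr.s(auto_attribs=True)
class Point(object):
    x: "int"
    y: "int" = 0

attr.resolve_types(Point)                 # resolve the string annotations
assert attr.fields(Point).x.type is int

p = Point(1, 2)
assert attr.asdict(p) == {"x": 1, "y": 2}
assert attr.astuple(p) == (1, 2)
assert attr.evolve(p, y=5) == Point(1, 5)
assert attr.has(Point) and not attr.has(int)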
diff --git a/testing/web-platform/tests/tools/third_party/attrs/src/attr/_make.py b/testing/web-platform/tests/tools/third_party/attrs/src/attr/_make.py
new file mode 100644
index 0000000000..d46f8a3e7a
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/src/attr/_make.py
@@ -0,0 +1,3173 @@
+# SPDX-License-Identifier: MIT
+
+from __future__ import absolute_import, division, print_function
+
+import copy
+import inspect
+import linecache
+import sys
+import warnings
+
+from operator import itemgetter
+
+# We need to import _compat itself in addition to the _compat members to avoid
+# having the thread-local in the globals here.
+from . import _compat, _config, setters
+from ._compat import (
+ HAS_F_STRINGS,
+ PY2,
+ PY310,
+ PYPY,
+ isclass,
+ iteritems,
+ metadata_proxy,
+ new_class,
+ ordered_dict,
+ set_closure_cell,
+)
+from .exceptions import (
+ DefaultAlreadySetError,
+ FrozenInstanceError,
+ NotAnAttrsClassError,
+ PythonTooOldError,
+ UnannotatedAttributeError,
+)
+
+
+if not PY2:
+ import typing
+
+
+# This is used at least twice, so cache it here.
+_obj_setattr = object.__setattr__
+_init_converter_pat = "__attr_converter_%s"
+_init_factory_pat = "__attr_factory_{}"
+_tuple_property_pat = (
+ " {attr_name} = _attrs_property(_attrs_itemgetter({index}))"
+)
+_classvar_prefixes = (
+ "typing.ClassVar",
+ "t.ClassVar",
+ "ClassVar",
+ "typing_extensions.ClassVar",
+)
+# we don't use a double-underscore prefix because that triggers
+# name mangling when trying to create a slot for the field
+# (when slots=True)
+_hash_cache_field = "_attrs_cached_hash"
+
+_empty_metadata_singleton = metadata_proxy({})
+
+# Unique object for unequivocal getattr() defaults.
+_sentinel = object()
+
+_ng_default_on_setattr = setters.pipe(setters.convert, setters.validate)
+
+
+class _Nothing(object):
+ """
+ Sentinel class to indicate the lack of a value when ``None`` is ambiguous.
+
+ ``_Nothing`` is a singleton. There is only ever one of it.
+
+ .. versionchanged:: 21.1.0 ``bool(NOTHING)`` is now False.
+ """
+
+ _singleton = None
+
+ def __new__(cls):
+ if _Nothing._singleton is None:
+ _Nothing._singleton = super(_Nothing, cls).__new__(cls)
+ return _Nothing._singleton
+
+ def __repr__(self):
+ return "NOTHING"
+
+ def __bool__(self):
+ return False
+
+ def __len__(self):
+ return 0 # __bool__ for Python 2
+
+
+NOTHING = _Nothing()
+"""
+Sentinel to indicate the lack of a value when ``None`` is ambiguous.
+"""
+
+
+class _CacheHashWrapper(int):
+ """
+ An integer subclass that pickles / copies as None
+
+ This is used for non-slots classes with ``cache_hash=True``, to avoid
+ serializing a potentially (even likely) invalid hash value. Since ``None``
+ is the default value for uncalculated hashes, whenever this is copied,
+ the copy's value for the hash should automatically reset.
+
+ See GH #613 for more details.
+ """
+
+ if PY2:
+ # For some reason `type(None)` isn't callable in Python 2, but we don't
+ # actually need a constructor for None objects, we just need any
+ # available function that returns None.
+ def __reduce__(self, _none_constructor=getattr, _args=(0, "", None)):
+ return _none_constructor, _args
+
+ else:
+
+ def __reduce__(self, _none_constructor=type(None), _args=()):
+ return _none_constructor, _args
+
+
+def attrib(
+ default=NOTHING,
+ validator=None,
+ repr=True,
+ cmp=None,
+ hash=None,
+ init=True,
+ metadata=None,
+ type=None,
+ converter=None,
+ factory=None,
+ kw_only=False,
+ eq=None,
+ order=None,
+ on_setattr=None,
+):
+ """
+ Create a new attribute on a class.
+
+ .. warning::
+
+ Does *not* do anything unless the class is also decorated with
+ `attr.s`!
+
+ :param default: A value that is used if an ``attrs``-generated ``__init__``
+ is used and no value is passed while instantiating or the attribute is
+ excluded using ``init=False``.
+
+ If the value is an instance of `attrs.Factory`, its callable will be
+ used to construct a new value (useful for mutable data types like lists
+ or dicts).
+
+ If a default is not set (or set manually to `attrs.NOTHING`), a value
+ *must* be supplied when instantiating; otherwise a `TypeError`
+ will be raised.
+
+ The default can also be set using decorator notation as shown below.
+
+ :type default: Any value
+
+ :param callable factory: Syntactic sugar for
+ ``default=attr.Factory(factory)``.
+
+ :param validator: `callable` that is called by ``attrs``-generated
+ ``__init__`` methods after the instance has been initialized. They
+ receive the initialized instance, the :func:`~attrs.Attribute`, and the
+ passed value.
+
+ The return value is *not* inspected so the validator has to throw an
+ exception itself.
+
+ If a `list` is passed, its items are treated as validators and must
+ all pass.
+
+ Validators can be globally disabled and re-enabled using
+ `get_run_validators`.
+
+ The validator can also be set using decorator notation as shown below.
+
+ :type validator: `callable` or a `list` of `callable`\\ s.
+
+ :param repr: Include this attribute in the generated ``__repr__``
+ method. If ``True``, include the attribute; if ``False``, omit it. By
+ default, the built-in ``repr()`` function is used. To override how the
+ attribute value is formatted, pass a ``callable`` that takes a single
+ value and returns a string. Note that the resulting string is used
+ as-is, i.e. it will be used directly *instead* of calling ``repr()``
+ (the default).
+ :type repr: a `bool` or a `callable` to use a custom function.
+
+ :param eq: If ``True`` (default), include this attribute in the
+ generated ``__eq__`` and ``__ne__`` methods that check two instances
+ for equality. To override how the attribute value is compared,
+ pass a ``callable`` that takes a single value and returns the value
+ to be compared.
+ :type eq: a `bool` or a `callable`.
+
+    :param order: If ``True`` (default), include this attribute in the
+ generated ``__lt__``, ``__le__``, ``__gt__`` and ``__ge__`` methods.
+ To override how the attribute value is ordered,
+ pass a ``callable`` that takes a single value and returns the value
+ to be ordered.
+ :type order: a `bool` or a `callable`.
+
+ :param cmp: Setting *cmp* is equivalent to setting *eq* and *order* to the
+ same value. Must not be mixed with *eq* or *order*.
+ :type cmp: a `bool` or a `callable`.
+
+ :param Optional[bool] hash: Include this attribute in the generated
+ ``__hash__`` method. If ``None`` (default), mirror *eq*'s value. This
+        is the correct behavior according to the Python spec. Setting this value
+ to anything else than ``None`` is *discouraged*.
+ :param bool init: Include this attribute in the generated ``__init__``
+ method. It is possible to set this to ``False`` and set a default
+        value. In that case this attribute is unconditionally initialized
+ with the specified default value or factory.
+ :param callable converter: `callable` that is called by
+ ``attrs``-generated ``__init__`` methods to convert attribute's value
+ to the desired format. It is given the passed-in value, and the
+ returned value will be used as the new value of the attribute. The
+ value is converted before being passed to the validator, if any.
+ :param metadata: An arbitrary mapping, to be used by third-party
+ components. See `extending_metadata`.
+ :param type: The type of the attribute. In Python 3.6 or greater, the
+ preferred method to specify the type is using a variable annotation
+ (see `PEP 526 <https://www.python.org/dev/peps/pep-0526/>`_).
+ This argument is provided for backward compatibility.
+ Regardless of the approach used, the type will be stored on
+ ``Attribute.type``.
+
+ Please note that ``attrs`` doesn't do anything with this metadata by
+ itself. You can use it as part of your own code or for
+ `static type checking <types>`.
+ :param kw_only: Make this attribute keyword-only (Python 3+)
+ in the generated ``__init__`` (if ``init`` is ``False``, this
+ parameter is ignored).
+    :param on_setattr: Allows overwriting the *on_setattr* setting from
+ `attr.s`. If left `None`, the *on_setattr* value from `attr.s` is used.
+ Set to `attrs.setters.NO_OP` to run **no** `setattr` hooks for this
+ attribute -- regardless of the setting in `attr.s`.
+ :type on_setattr: `callable`, or a list of callables, or `None`, or
+ `attrs.setters.NO_OP`
+
+ .. versionadded:: 15.2.0 *convert*
+ .. versionadded:: 16.3.0 *metadata*
+ .. versionchanged:: 17.1.0 *validator* can be a ``list`` now.
+ .. versionchanged:: 17.1.0
+ *hash* is ``None`` and therefore mirrors *eq* by default.
+ .. versionadded:: 17.3.0 *type*
+ .. deprecated:: 17.4.0 *convert*
+ .. versionadded:: 17.4.0 *converter* as a replacement for the deprecated
+ *convert* to achieve consistency with other noun-based arguments.
+ .. versionadded:: 18.1.0
+ ``factory=f`` is syntactic sugar for ``default=attr.Factory(f)``.
+ .. versionadded:: 18.2.0 *kw_only*
+ .. versionchanged:: 19.2.0 *convert* keyword argument removed.
+ .. versionchanged:: 19.2.0 *repr* also accepts a custom callable.
+ .. deprecated:: 19.2.0 *cmp* Removal on or after 2021-06-01.
+ .. versionadded:: 19.2.0 *eq* and *order*
+ .. versionadded:: 20.1.0 *on_setattr*
+ .. versionchanged:: 20.3.0 *kw_only* backported to Python 2
+ .. versionchanged:: 21.1.0
+ *eq*, *order*, and *cmp* also accept a custom callable
+ .. versionchanged:: 21.1.0 *cmp* undeprecated
+ """
+ eq, eq_key, order, order_key = _determine_attrib_eq_order(
+ cmp, eq, order, True
+ )
+
+ if hash is not None and hash is not True and hash is not False:
+ raise TypeError(
+ "Invalid value for hash. Must be True, False, or None."
+ )
+
+ if factory is not None:
+ if default is not NOTHING:
+ raise ValueError(
+ "The `default` and `factory` arguments are mutually "
+ "exclusive."
+ )
+ if not callable(factory):
+ raise ValueError("The `factory` argument must be a callable.")
+ default = Factory(factory)
+
+ if metadata is None:
+ metadata = {}
+
+ # Apply syntactic sugar by auto-wrapping.
+ if isinstance(on_setattr, (list, tuple)):
+ on_setattr = setters.pipe(*on_setattr)
+
+ if validator and isinstance(validator, (list, tuple)):
+ validator = and_(*validator)
+
+ if converter and isinstance(converter, (list, tuple)):
+ converter = pipe(*converter)
+
+ return _CountingAttr(
+ default=default,
+ validator=validator,
+ repr=repr,
+ cmp=None,
+ hash=hash,
+ init=init,
+ converter=converter,
+ metadata=metadata,
+ type=type,
+ kw_only=kw_only,
+ eq=eq,
+ eq_key=eq_key,
+ order=order,
+ order_key=order_key,
+ on_setattr=on_setattr,
+ )
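A small sketch of the converter, factory and decorator-notation validator features described above (illustrative only, not part of the vendored sources; ``Server`` is a hypothetical example class and the package is assumed to be importable as ``attr``):

import attr

@attr.s
class Server(object):
    port = attr.ib(converter=int)    # the converter runs before the validator
    tags = attr.ib(factory=list)     # sugar for default=attr.Factory(list)

    @port.validator
    def _check_port(self, attribute, value):
        # Validators receive the instance, the Attribute and the converted value.
        if not 0 < value < 65536:
            raise ValueError("port out of range")

s = Server(port="8080")
assert s.port == 8080 and s.tags == []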
+
+
+def _compile_and_eval(script, globs, locs=None, filename=""):
+ """
+ "Exec" the script with the given global (globs) and local (locs) variables.
+ """
+ bytecode = compile(script, filename, "exec")
+ eval(bytecode, globs, locs)
+
+
+def _make_method(name, script, filename, globs=None):
+ """
+ Create the method with the script given and return the method object.
+ """
+ locs = {}
+ if globs is None:
+ globs = {}
+
+    # In order for debuggers like PDB to be able to step through the code,
+ # we add a fake linecache entry.
+ count = 1
+ base_filename = filename
+ while True:
+ linecache_tuple = (
+ len(script),
+ None,
+ script.splitlines(True),
+ filename,
+ )
+ old_val = linecache.cache.setdefault(filename, linecache_tuple)
+ if old_val == linecache_tuple:
+ break
+ else:
+ filename = "{}-{}>".format(base_filename[:-1], count)
+ count += 1
+
+ _compile_and_eval(script, globs, locs, filename)
+
+ return locs[name]
+
+
+def _make_attr_tuple_class(cls_name, attr_names):
+ """
+ Create a tuple subclass to hold `Attribute`s for an `attrs` class.
+
+ The subclass is a bare tuple with properties for names.
+
+ class MyClassAttributes(tuple):
+ __slots__ = ()
+ x = property(itemgetter(0))
+ """
+ attr_class_name = "{}Attributes".format(cls_name)
+ attr_class_template = [
+ "class {}(tuple):".format(attr_class_name),
+ " __slots__ = ()",
+ ]
+ if attr_names:
+ for i, attr_name in enumerate(attr_names):
+ attr_class_template.append(
+ _tuple_property_pat.format(index=i, attr_name=attr_name)
+ )
+ else:
+ attr_class_template.append(" pass")
+ globs = {"_attrs_itemgetter": itemgetter, "_attrs_property": property}
+ _compile_and_eval("\n".join(attr_class_template), globs)
+ return globs[attr_class_name]
+
+
+# Tuple class for extracted attributes from a class definition.
+# `base_attrs` is a subset of `attrs`.
+_Attributes = _make_attr_tuple_class(
+ "_Attributes",
+ [
+ # all attributes to build dunder methods for
+ "attrs",
+ # attributes that have been inherited
+ "base_attrs",
+ # map inherited attributes to their originating classes
+ "base_attrs_map",
+ ],
+)
+
+
+def _is_class_var(annot):
+ """
+ Check whether *annot* is a typing.ClassVar.
+
+ The string comparison hack is used to avoid evaluating all string
+ annotations which would put attrs-based classes at a performance
+ disadvantage compared to plain old classes.
+ """
+ annot = str(annot)
+
+ # Annotation can be quoted.
+ if annot.startswith(("'", '"')) and annot.endswith(("'", '"')):
+ annot = annot[1:-1]
+
+ return annot.startswith(_classvar_prefixes)
+
+
+def _has_own_attribute(cls, attrib_name):
+ """
+ Check whether *cls* defines *attrib_name* (and doesn't just inherit it).
+
+ Requires Python 3.
+ """
+ attr = getattr(cls, attrib_name, _sentinel)
+ if attr is _sentinel:
+ return False
+
+ for base_cls in cls.__mro__[1:]:
+ a = getattr(base_cls, attrib_name, None)
+ if attr is a:
+ return False
+
+ return True
+
+
+def _get_annotations(cls):
+ """
+ Get annotations for *cls*.
+ """
+ if _has_own_attribute(cls, "__annotations__"):
+ return cls.__annotations__
+
+ return {}
+
+
+def _counter_getter(e):
+ """
+ Key function for sorting to avoid re-creating a lambda for every class.
+ """
+ return e[1].counter
+
+
+def _collect_base_attrs(cls, taken_attr_names):
+ """
+ Collect attr.ibs from base classes of *cls*, except *taken_attr_names*.
+ """
+ base_attrs = []
+ base_attr_map = {} # A dictionary of base attrs to their classes.
+
+ # Traverse the MRO and collect attributes.
+ for base_cls in reversed(cls.__mro__[1:-1]):
+ for a in getattr(base_cls, "__attrs_attrs__", []):
+ if a.inherited or a.name in taken_attr_names:
+ continue
+
+ a = a.evolve(inherited=True)
+ base_attrs.append(a)
+ base_attr_map[a.name] = base_cls
+
+ # For each name, only keep the freshest definition i.e. the furthest at the
+ # back. base_attr_map is fine because it gets overwritten with every new
+ # instance.
+ filtered = []
+ seen = set()
+ for a in reversed(base_attrs):
+ if a.name in seen:
+ continue
+ filtered.insert(0, a)
+ seen.add(a.name)
+
+ return filtered, base_attr_map
+
+
+def _collect_base_attrs_broken(cls, taken_attr_names):
+ """
+ Collect attr.ibs from base classes of *cls*, except *taken_attr_names*.
+
+ N.B. *taken_attr_names* will be mutated.
+
+ Adhere to the old incorrect behavior.
+
+ Notably it collects from the front and considers inherited attributes which
+ leads to the buggy behavior reported in #428.
+ """
+ base_attrs = []
+ base_attr_map = {} # A dictionary of base attrs to their classes.
+
+ # Traverse the MRO and collect attributes.
+ for base_cls in cls.__mro__[1:-1]:
+ for a in getattr(base_cls, "__attrs_attrs__", []):
+ if a.name in taken_attr_names:
+ continue
+
+ a = a.evolve(inherited=True)
+ taken_attr_names.add(a.name)
+ base_attrs.append(a)
+ base_attr_map[a.name] = base_cls
+
+ return base_attrs, base_attr_map
+
+
+def _transform_attrs(
+ cls, these, auto_attribs, kw_only, collect_by_mro, field_transformer
+):
+ """
+ Transform all `_CountingAttr`s on a class into `Attribute`s.
+
+ If *these* is passed, use that and don't look for them on the class.
+
+ *collect_by_mro* is True, collect them in the correct MRO order, otherwise
+ use the old -- incorrect -- order. See #428.
+
+ Return an `_Attributes`.
+ """
+ cd = cls.__dict__
+ anns = _get_annotations(cls)
+
+ if these is not None:
+ ca_list = [(name, ca) for name, ca in iteritems(these)]
+
+ if not isinstance(these, ordered_dict):
+ ca_list.sort(key=_counter_getter)
+ elif auto_attribs is True:
+ ca_names = {
+ name
+ for name, attr in cd.items()
+ if isinstance(attr, _CountingAttr)
+ }
+ ca_list = []
+ annot_names = set()
+ for attr_name, type in anns.items():
+ if _is_class_var(type):
+ continue
+ annot_names.add(attr_name)
+ a = cd.get(attr_name, NOTHING)
+
+ if not isinstance(a, _CountingAttr):
+ if a is NOTHING:
+ a = attrib()
+ else:
+ a = attrib(default=a)
+ ca_list.append((attr_name, a))
+
+ unannotated = ca_names - annot_names
+ if len(unannotated) > 0:
+ raise UnannotatedAttributeError(
+ "The following `attr.ib`s lack a type annotation: "
+ + ", ".join(
+ sorted(unannotated, key=lambda n: cd.get(n).counter)
+ )
+ + "."
+ )
+ else:
+ ca_list = sorted(
+ (
+ (name, attr)
+ for name, attr in cd.items()
+ if isinstance(attr, _CountingAttr)
+ ),
+ key=lambda e: e[1].counter,
+ )
+
+ own_attrs = [
+ Attribute.from_counting_attr(
+ name=attr_name, ca=ca, type=anns.get(attr_name)
+ )
+ for attr_name, ca in ca_list
+ ]
+
+ if collect_by_mro:
+ base_attrs, base_attr_map = _collect_base_attrs(
+ cls, {a.name for a in own_attrs}
+ )
+ else:
+ base_attrs, base_attr_map = _collect_base_attrs_broken(
+ cls, {a.name for a in own_attrs}
+ )
+
+ if kw_only:
+ own_attrs = [a.evolve(kw_only=True) for a in own_attrs]
+ base_attrs = [a.evolve(kw_only=True) for a in base_attrs]
+
+ attrs = base_attrs + own_attrs
+
+ # Mandatory vs non-mandatory attr order only matters when they are part of
+ # the __init__ signature and when they aren't kw_only (which are moved to
+ # the end and can be mandatory or non-mandatory in any order, as they will
+ # be specified as keyword args anyway). Check the order of those attrs:
+ had_default = False
+ for a in (a for a in attrs if a.init is not False and a.kw_only is False):
+ if had_default is True and a.default is NOTHING:
+ raise ValueError(
+ "No mandatory attributes allowed after an attribute with a "
+ "default value or factory. Attribute in question: %r" % (a,)
+ )
+
+ if had_default is False and a.default is not NOTHING:
+ had_default = True
+
+ if field_transformer is not None:
+ attrs = field_transformer(cls, attrs)
+
+ # Create AttrsClass *after* applying the field_transformer since it may
+ # add or remove attributes!
+ attr_names = [a.name for a in attrs]
+ AttrsClass = _make_attr_tuple_class(cls.__name__, attr_names)
+
+ return _Attributes((AttrsClass(attrs), base_attrs, base_attr_map))
+
+
+if PYPY:
+
+ def _frozen_setattrs(self, name, value):
+ """
+ Attached to frozen classes as __setattr__.
+ """
+ if isinstance(self, BaseException) and name in (
+ "__cause__",
+ "__context__",
+ ):
+ BaseException.__setattr__(self, name, value)
+ return
+
+ raise FrozenInstanceError()
+
+else:
+
+ def _frozen_setattrs(self, name, value):
+ """
+ Attached to frozen classes as __setattr__.
+ """
+ raise FrozenInstanceError()
+
+
+def _frozen_delattrs(self, name):
+ """
+ Attached to frozen classes as __delattr__.
+ """
+ raise FrozenInstanceError()
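What the two ``_frozen_*`` hooks above look like from the outside (illustrative only, not part of the vendored sources; ``Color`` is a hypothetical example class and the package is assumed to be importable as ``attr``):

import attr

@attr.s(frozen=True)
class Color(object):
    name = attr.ib()

c = Color("red")
try:
    c.name = "blue"
except attr.exceptions.FrozenInstanceError:
    pass  # attribute assignment (and deletion) raises FrozenInstanceError
else:
    raise AssertionError("frozen instance should reject assignment")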
+
+
+class _ClassBuilder(object):
+ """
+ Iteratively build *one* class.
+ """
+
+ __slots__ = (
+ "_attr_names",
+ "_attrs",
+ "_base_attr_map",
+ "_base_names",
+ "_cache_hash",
+ "_cls",
+ "_cls_dict",
+ "_delete_attribs",
+ "_frozen",
+ "_has_pre_init",
+ "_has_post_init",
+ "_is_exc",
+ "_on_setattr",
+ "_slots",
+ "_weakref_slot",
+ "_wrote_own_setattr",
+ "_has_custom_setattr",
+ )
+
+ def __init__(
+ self,
+ cls,
+ these,
+ slots,
+ frozen,
+ weakref_slot,
+ getstate_setstate,
+ auto_attribs,
+ kw_only,
+ cache_hash,
+ is_exc,
+ collect_by_mro,
+ on_setattr,
+ has_custom_setattr,
+ field_transformer,
+ ):
+ attrs, base_attrs, base_map = _transform_attrs(
+ cls,
+ these,
+ auto_attribs,
+ kw_only,
+ collect_by_mro,
+ field_transformer,
+ )
+
+ self._cls = cls
+ self._cls_dict = dict(cls.__dict__) if slots else {}
+ self._attrs = attrs
+ self._base_names = set(a.name for a in base_attrs)
+ self._base_attr_map = base_map
+ self._attr_names = tuple(a.name for a in attrs)
+ self._slots = slots
+ self._frozen = frozen
+ self._weakref_slot = weakref_slot
+ self._cache_hash = cache_hash
+ self._has_pre_init = bool(getattr(cls, "__attrs_pre_init__", False))
+ self._has_post_init = bool(getattr(cls, "__attrs_post_init__", False))
+ self._delete_attribs = not bool(these)
+ self._is_exc = is_exc
+ self._on_setattr = on_setattr
+
+ self._has_custom_setattr = has_custom_setattr
+ self._wrote_own_setattr = False
+
+ self._cls_dict["__attrs_attrs__"] = self._attrs
+
+ if frozen:
+ self._cls_dict["__setattr__"] = _frozen_setattrs
+ self._cls_dict["__delattr__"] = _frozen_delattrs
+
+ self._wrote_own_setattr = True
+ elif on_setattr in (
+ _ng_default_on_setattr,
+ setters.validate,
+ setters.convert,
+ ):
+ has_validator = has_converter = False
+ for a in attrs:
+ if a.validator is not None:
+ has_validator = True
+ if a.converter is not None:
+ has_converter = True
+
+ if has_validator and has_converter:
+ break
+ if (
+ (
+ on_setattr == _ng_default_on_setattr
+ and not (has_validator or has_converter)
+ )
+ or (on_setattr == setters.validate and not has_validator)
+ or (on_setattr == setters.convert and not has_converter)
+ ):
+ # If class-level on_setattr is set to convert + validate, but
+ # there's no field to convert or validate, pretend like there's
+ # no on_setattr.
+ self._on_setattr = None
+
+ if getstate_setstate:
+ (
+ self._cls_dict["__getstate__"],
+ self._cls_dict["__setstate__"],
+ ) = self._make_getstate_setstate()
+
+ def __repr__(self):
+ return "<_ClassBuilder(cls={cls})>".format(cls=self._cls.__name__)
+
+ def build_class(self):
+ """
+ Finalize class based on the accumulated configuration.
+
+ Builder cannot be used after calling this method.
+ """
+ if self._slots is True:
+ return self._create_slots_class()
+ else:
+ return self._patch_original_class()
+
+ def _patch_original_class(self):
+ """
+ Apply accumulated methods and return the class.
+ """
+ cls = self._cls
+ base_names = self._base_names
+
+ # Clean class of attribute definitions (`attr.ib()`s).
+ if self._delete_attribs:
+ for name in self._attr_names:
+ if (
+ name not in base_names
+ and getattr(cls, name, _sentinel) is not _sentinel
+ ):
+ try:
+ delattr(cls, name)
+ except AttributeError:
+ # This can happen if a base class defines a class
+ # variable and we want to set an attribute with the
+ # same name by using only a type annotation.
+ pass
+
+ # Attach our dunder methods.
+ for name, value in self._cls_dict.items():
+ setattr(cls, name, value)
+
+ # If we've inherited an attrs __setattr__ and don't write our own,
+ # reset it to object's.
+ if not self._wrote_own_setattr and getattr(
+ cls, "__attrs_own_setattr__", False
+ ):
+ cls.__attrs_own_setattr__ = False
+
+ if not self._has_custom_setattr:
+ cls.__setattr__ = object.__setattr__
+
+ return cls
+
+ def _create_slots_class(self):
+ """
+ Build and return a new class with a `__slots__` attribute.
+ """
+ cd = {
+ k: v
+ for k, v in iteritems(self._cls_dict)
+ if k not in tuple(self._attr_names) + ("__dict__", "__weakref__")
+ }
+
+ # If our class doesn't have its own implementation of __setattr__
+ # (either from the user or by us), check the bases, if one of them has
+ # an attrs-made __setattr__, that needs to be reset. We don't walk the
+ # MRO because we only care about our immediate base classes.
+ # XXX: This can be confused by subclassing a slotted attrs class with
+        # XXX: a non-attrs class and then subclassing the resulting class with an attrs
+ # XXX: class. See `test_slotted_confused` for details. For now that's
+ # XXX: OK with us.
+ if not self._wrote_own_setattr:
+ cd["__attrs_own_setattr__"] = False
+
+ if not self._has_custom_setattr:
+ for base_cls in self._cls.__bases__:
+ if base_cls.__dict__.get("__attrs_own_setattr__", False):
+ cd["__setattr__"] = object.__setattr__
+ break
+
+ # Traverse the MRO to collect existing slots
+ # and check for an existing __weakref__.
+ existing_slots = dict()
+ weakref_inherited = False
+ for base_cls in self._cls.__mro__[1:-1]:
+ if base_cls.__dict__.get("__weakref__", None) is not None:
+ weakref_inherited = True
+ existing_slots.update(
+ {
+ name: getattr(base_cls, name)
+ for name in getattr(base_cls, "__slots__", [])
+ }
+ )
+
+ base_names = set(self._base_names)
+
+ names = self._attr_names
+ if (
+ self._weakref_slot
+ and "__weakref__" not in getattr(self._cls, "__slots__", ())
+ and "__weakref__" not in names
+ and not weakref_inherited
+ ):
+ names += ("__weakref__",)
+
+ # We only add the names of attributes that aren't inherited.
+ # Setting __slots__ to inherited attributes wastes memory.
+ slot_names = [name for name in names if name not in base_names]
+        # There are slots for attributes from the current class
+        # that are defined in parent classes.
+        # As their descriptors may be overridden by a child class,
+        # we collect them here and update the class dict.
+ reused_slots = {
+ slot: slot_descriptor
+ for slot, slot_descriptor in iteritems(existing_slots)
+ if slot in slot_names
+ }
+ slot_names = [name for name in slot_names if name not in reused_slots]
+ cd.update(reused_slots)
+ if self._cache_hash:
+ slot_names.append(_hash_cache_field)
+ cd["__slots__"] = tuple(slot_names)
+
+ qualname = getattr(self._cls, "__qualname__", None)
+ if qualname is not None:
+ cd["__qualname__"] = qualname
+
+ # Create new class based on old class and our methods.
+ cls = type(self._cls)(self._cls.__name__, self._cls.__bases__, cd)
+
+ # The following is a fix for
+ # <https://github.com/python-attrs/attrs/issues/102>. On Python 3,
+ # if a method mentions `__class__` or uses the no-arg super(), the
+ # compiler will bake a reference to the class in the method itself
+ # as `method.__closure__`. Since we replace the class with a
+ # clone, we rewrite these references so it keeps working.
+ for item in cls.__dict__.values():
+ if isinstance(item, (classmethod, staticmethod)):
+ # Class- and staticmethods hide their functions inside.
+ # These might need to be rewritten as well.
+ closure_cells = getattr(item.__func__, "__closure__", None)
+ elif isinstance(item, property):
+ # Workaround for property `super()` shortcut (PY3-only).
+ # There is no universal way for other descriptors.
+ closure_cells = getattr(item.fget, "__closure__", None)
+ else:
+ closure_cells = getattr(item, "__closure__", None)
+
+ if not closure_cells: # Catch None or the empty list.
+ continue
+ for cell in closure_cells:
+ try:
+ match = cell.cell_contents is self._cls
+ except ValueError: # ValueError: Cell is empty
+ pass
+ else:
+ if match:
+ set_closure_cell(cell, cls)
+
+ return cls
+
+ def add_repr(self, ns):
+ self._cls_dict["__repr__"] = self._add_method_dunders(
+ _make_repr(self._attrs, ns, self._cls)
+ )
+ return self
+
+ def add_str(self):
+ repr = self._cls_dict.get("__repr__")
+ if repr is None:
+ raise ValueError(
+ "__str__ can only be generated if a __repr__ exists."
+ )
+
+ def __str__(self):
+ return self.__repr__()
+
+ self._cls_dict["__str__"] = self._add_method_dunders(__str__)
+ return self
+
+ def _make_getstate_setstate(self):
+ """
+ Create custom __setstate__ and __getstate__ methods.
+ """
+ # __weakref__ is not writable.
+ state_attr_names = tuple(
+ an for an in self._attr_names if an != "__weakref__"
+ )
+
+ def slots_getstate(self):
+ """
+ Automatically created by attrs.
+ """
+ return tuple(getattr(self, name) for name in state_attr_names)
+
+ hash_caching_enabled = self._cache_hash
+
+ def slots_setstate(self, state):
+ """
+ Automatically created by attrs.
+ """
+ __bound_setattr = _obj_setattr.__get__(self, Attribute)
+ for name, value in zip(state_attr_names, state):
+ __bound_setattr(name, value)
+
+ # The hash code cache is not included when the object is
+ # serialized, but it still needs to be initialized to None to
+ # indicate that the first call to __hash__ should be a cache
+ # miss.
+ if hash_caching_enabled:
+ __bound_setattr(_hash_cache_field, None)
+
+ return slots_getstate, slots_setstate
+
+ def make_unhashable(self):
+ self._cls_dict["__hash__"] = None
+ return self
+
+ def add_hash(self):
+ self._cls_dict["__hash__"] = self._add_method_dunders(
+ _make_hash(
+ self._cls,
+ self._attrs,
+ frozen=self._frozen,
+ cache_hash=self._cache_hash,
+ )
+ )
+
+ return self
+
+ def add_init(self):
+ self._cls_dict["__init__"] = self._add_method_dunders(
+ _make_init(
+ self._cls,
+ self._attrs,
+ self._has_pre_init,
+ self._has_post_init,
+ self._frozen,
+ self._slots,
+ self._cache_hash,
+ self._base_attr_map,
+ self._is_exc,
+ self._on_setattr,
+ attrs_init=False,
+ )
+ )
+
+ return self
+
+ def add_match_args(self):
+ self._cls_dict["__match_args__"] = tuple(
+ field.name
+ for field in self._attrs
+ if field.init and not field.kw_only
+ )
+
+ def add_attrs_init(self):
+ self._cls_dict["__attrs_init__"] = self._add_method_dunders(
+ _make_init(
+ self._cls,
+ self._attrs,
+ self._has_pre_init,
+ self._has_post_init,
+ self._frozen,
+ self._slots,
+ self._cache_hash,
+ self._base_attr_map,
+ self._is_exc,
+ self._on_setattr,
+ attrs_init=True,
+ )
+ )
+
+ return self
+
+ def add_eq(self):
+ cd = self._cls_dict
+
+ cd["__eq__"] = self._add_method_dunders(
+ _make_eq(self._cls, self._attrs)
+ )
+ cd["__ne__"] = self._add_method_dunders(_make_ne())
+
+ return self
+
+ def add_order(self):
+ cd = self._cls_dict
+
+ cd["__lt__"], cd["__le__"], cd["__gt__"], cd["__ge__"] = (
+ self._add_method_dunders(meth)
+ for meth in _make_order(self._cls, self._attrs)
+ )
+
+ return self
+
+ def add_setattr(self):
+ if self._frozen:
+ return self
+
+ sa_attrs = {}
+ for a in self._attrs:
+ on_setattr = a.on_setattr or self._on_setattr
+ if on_setattr and on_setattr is not setters.NO_OP:
+ sa_attrs[a.name] = a, on_setattr
+
+ if not sa_attrs:
+ return self
+
+ if self._has_custom_setattr:
+ # We need to write a __setattr__ but there already is one!
+ raise ValueError(
+ "Can't combine custom __setattr__ with on_setattr hooks."
+ )
+
+ # docstring comes from _add_method_dunders
+ def __setattr__(self, name, val):
+ try:
+ a, hook = sa_attrs[name]
+ except KeyError:
+ nval = val
+ else:
+ nval = hook(self, a, val)
+
+ _obj_setattr(self, name, nval)
+
+ self._cls_dict["__attrs_own_setattr__"] = True
+ self._cls_dict["__setattr__"] = self._add_method_dunders(__setattr__)
+ self._wrote_own_setattr = True
+
+ return self
+
+ def _add_method_dunders(self, method):
+ """
+ Add __module__ and __qualname__ to a *method* if possible.
+ """
+ try:
+ method.__module__ = self._cls.__module__
+ except AttributeError:
+ pass
+
+ try:
+ method.__qualname__ = ".".join(
+ (self._cls.__qualname__, method.__name__)
+ )
+ except AttributeError:
+ pass
+
+ try:
+ method.__doc__ = "Method generated by attrs for class %s." % (
+ self._cls.__qualname__,
+ )
+ except AttributeError:
+ pass
+
+ return method
+
+
+_CMP_DEPRECATION = (
+ "The usage of `cmp` is deprecated and will be removed on or after "
+ "2021-06-01. Please use `eq` and `order` instead."
+)
+
+
+def _determine_attrs_eq_order(cmp, eq, order, default_eq):
+ """
+ Validate the combination of *cmp*, *eq*, and *order*. Derive the effective
+ values of eq and order. If *eq* is None, set it to *default_eq*.
+ """
+ if cmp is not None and any((eq is not None, order is not None)):
+        raise ValueError("Don't mix `cmp` with `eq` and `order`.")
+
+ # cmp takes precedence due to bw-compatibility.
+ if cmp is not None:
+ return cmp, cmp
+
+ # If left None, equality is set to the specified default and ordering
+ # mirrors equality.
+ if eq is None:
+ eq = default_eq
+
+ if order is None:
+ order = eq
+
+ if eq is False and order is True:
+ raise ValueError("`order` can only be True if `eq` is True too.")
+
+ return eq, order
+
+
+def _determine_attrib_eq_order(cmp, eq, order, default_eq):
+ """
+ Validate the combination of *cmp*, *eq*, and *order*. Derive the effective
+ values of eq and order. If *eq* is None, set it to *default_eq*.
+ """
+ if cmp is not None and any((eq is not None, order is not None)):
+        raise ValueError("Don't mix `cmp` with `eq` and `order`.")
+
+ def decide_callable_or_boolean(value):
+ """
+ Decide whether a key function is used.
+ """
+ if callable(value):
+ value, key = True, value
+ else:
+ key = None
+ return value, key
+
+ # cmp takes precedence due to bw-compatibility.
+ if cmp is not None:
+ cmp, cmp_key = decide_callable_or_boolean(cmp)
+ return cmp, cmp_key, cmp, cmp_key
+
+ # If left None, equality is set to the specified default and ordering
+ # mirrors equality.
+ if eq is None:
+ eq, eq_key = default_eq, None
+ else:
+ eq, eq_key = decide_callable_or_boolean(eq)
+
+ if order is None:
+ order, order_key = eq, eq_key
+ else:
+ order, order_key = decide_callable_or_boolean(order)
+
+ if eq is False and order is True:
+ raise ValueError("`order` can only be True if `eq` is True too.")
+
+ return eq, eq_key, order, order_key
+
+
+def _determine_whether_to_implement(
+ cls, flag, auto_detect, dunders, default=True
+):
+ """
+ Check whether we should implement a set of methods for *cls*.
+
+    *flag* is the argument passed into @attr.s like 'init', *auto_detect* is
+    the same as passed into @attr.s, and *dunders* is a tuple of attribute
+    names whose presence signals that the user has implemented it themselves.
+
+    Return *default* if no reason either for or against is found.
+
+ auto_detect must be False on Python 2.
+ """
+ if flag is True or flag is False:
+ return flag
+
+ if flag is None and auto_detect is False:
+ return default
+
+ # Logically, flag is None and auto_detect is True here.
+ for dunder in dunders:
+ if _has_own_attribute(cls, dunder):
+ return False
+
+ return default
+
+
+def attrs(
+ maybe_cls=None,
+ these=None,
+ repr_ns=None,
+ repr=None,
+ cmp=None,
+ hash=None,
+ init=None,
+ slots=False,
+ frozen=False,
+ weakref_slot=True,
+ str=False,
+ auto_attribs=False,
+ kw_only=False,
+ cache_hash=False,
+ auto_exc=False,
+ eq=None,
+ order=None,
+ auto_detect=False,
+ collect_by_mro=False,
+ getstate_setstate=None,
+ on_setattr=None,
+ field_transformer=None,
+ match_args=True,
+):
+ r"""
+ A class decorator that adds `dunder
+ <https://wiki.python.org/moin/DunderAlias>`_\ -methods according to the
+ specified attributes using `attr.ib` or the *these* argument.
+
+ :param these: A dictionary of name to `attr.ib` mappings. This is
+ useful to avoid the definition of your attributes within the class body
+ because you can't (e.g. if you want to add ``__repr__`` methods to
+ Django models) or don't want to.
+
+ If *these* is not ``None``, ``attrs`` will *not* search the class body
+ for attributes and will *not* remove any attributes from it.
+
+ If *these* is an ordered dict (`dict` on Python 3.6+,
+ `collections.OrderedDict` otherwise), the order is deduced from
+ the order of the attributes inside *these*. Otherwise the order
+ of the definition of the attributes is used.
+
+ :type these: `dict` of `str` to `attr.ib`
+
+ :param str repr_ns: When using nested classes, there's no way in Python 2
+ to automatically detect that. Therefore it's possible to set the
+ namespace explicitly for a more meaningful ``repr`` output.
+ :param bool auto_detect: Instead of setting the *init*, *repr*, *eq*,
+ *order*, and *hash* arguments explicitly, assume they are set to
+ ``True`` **unless any** of the involved methods for one of the
+ arguments is implemented in the *current* class (i.e. it is *not*
+ inherited from some base class).
+
+ So for example by implementing ``__eq__`` on a class yourself,
+ ``attrs`` will deduce ``eq=False`` and will create *neither*
+ ``__eq__`` *nor* ``__ne__`` (but Python classes come with a sensible
+ ``__ne__`` by default, so it *should* be enough to only implement
+ ``__eq__`` in most cases).
+
+ .. warning::
+
+ If you prevent ``attrs`` from creating the ordering methods for you
+ (``order=False``, e.g. by implementing ``__le__``), it becomes
+ *your* responsibility to make sure its ordering is sound. The best
+ way is to use the `functools.total_ordering` decorator.
+
+
+ Passing ``True`` or ``False`` to *init*, *repr*, *eq*, *order*,
+ *cmp*, or *hash* overrides whatever *auto_detect* would determine.
+
+ *auto_detect* requires Python 3. Setting it ``True`` on Python 2 raises
+ an `attrs.exceptions.PythonTooOldError`.
+
+ :param bool repr: Create a ``__repr__`` method with a human readable
+        representation of ``attrs`` attributes.
+ :param bool str: Create a ``__str__`` method that is identical to
+ ``__repr__``. This is usually not necessary except for
+ `Exception`\ s.
+ :param Optional[bool] eq: If ``True`` or ``None`` (default), add ``__eq__``
+ and ``__ne__`` methods that check two instances for equality.
+
+ They compare the instances as if they were tuples of their ``attrs``
+ attributes if and only if the types of both classes are *identical*!
+ :param Optional[bool] order: If ``True``, add ``__lt__``, ``__le__``,
+ ``__gt__``, and ``__ge__`` methods that behave like *eq* above and
+ allow instances to be ordered. If ``None`` (default) mirror value of
+ *eq*.
+ :param Optional[bool] cmp: Setting *cmp* is equivalent to setting *eq*
+ and *order* to the same value. Must not be mixed with *eq* or *order*.
+ :param Optional[bool] hash: If ``None`` (default), the ``__hash__`` method
+ is generated according how *eq* and *frozen* are set.
+
+ 1. If *both* are True, ``attrs`` will generate a ``__hash__`` for you.
+ 2. If *eq* is True and *frozen* is False, ``__hash__`` will be set to
+ None, marking it unhashable (which it is).
+ 3. If *eq* is False, ``__hash__`` will be left untouched meaning the
+ ``__hash__`` method of the base class will be used (if base class is
+ ``object``, this means it will fall back to id-based hashing.).
+
+ Although not recommended, you can decide for yourself and force
+ ``attrs`` to create one (e.g. if the class is immutable even though you
+ didn't freeze it programmatically) by passing ``True`` or not. Both of
+ these cases are rather special and should be used carefully.
+
+ See our documentation on `hashing`, Python's documentation on
+ `object.__hash__`, and the `GitHub issue that led to the default \
+ behavior <https://github.com/python-attrs/attrs/issues/136>`_ for more
+ details.
+ :param bool init: Create a ``__init__`` method that initializes the
+ ``attrs`` attributes. Leading underscores are stripped for the argument
+ name. If a ``__attrs_pre_init__`` method exists on the class, it will
+ be called before the class is initialized. If a ``__attrs_post_init__``
+ method exists on the class, it will be called after the class is fully
+ initialized.
+
+ If ``init`` is ``False``, an ``__attrs_init__`` method will be
+ injected instead. This allows you to define a custom ``__init__``
+ method that can do pre-init work such as ``super().__init__()``,
+ and then call ``__attrs_init__()`` and ``__attrs_post_init__()``.
+ :param bool slots: Create a `slotted class <slotted classes>` that's more
+ memory-efficient. Slotted classes are generally superior to the default
+ dict classes, but have some gotchas you should know about, so we
+ encourage you to read the `glossary entry <slotted classes>`.
+ :param bool frozen: Make instances immutable after initialization. If
+ someone attempts to modify a frozen instance,
+ `attr.exceptions.FrozenInstanceError` is raised.
+
+ .. note::
+
+ 1. This is achieved by installing a custom ``__setattr__`` method
+ on your class, so you can't implement your own.
+
+ 2. True immutability is impossible in Python.
+
+            3. This *does* have a minor runtime performance `impact
+ <how-frozen>` when initializing new instances. In other words:
+ ``__init__`` is slightly slower with ``frozen=True``.
+
+ 4. If a class is frozen, you cannot modify ``self`` in
+ ``__attrs_post_init__`` or a self-written ``__init__``. You can
+ circumvent that limitation by using
+ ``object.__setattr__(self, "attribute_name", value)``.
+
+ 5. Subclasses of a frozen class are frozen too.
+
+ :param bool weakref_slot: Make instances weak-referenceable. This has no
+ effect unless ``slots`` is also enabled.
+ :param bool auto_attribs: If ``True``, collect `PEP 526`_-annotated
+ attributes (Python 3.6 and later only) from the class body.
+
+ In this case, you **must** annotate every field. If ``attrs``
+ encounters a field that is set to an `attr.ib` but lacks a type
+ annotation, an `attr.exceptions.UnannotatedAttributeError` is
+ raised. Use ``field_name: typing.Any = attr.ib(...)`` if you don't
+ want to set a type.
+
+ If you assign a value to those attributes (e.g. ``x: int = 42``), that
+        value becomes the default value as if it were passed using
+ ``attr.ib(default=42)``. Passing an instance of `attrs.Factory` also
+ works as expected in most cases (see warning below).
+
+ Attributes annotated as `typing.ClassVar`, and attributes that are
+ neither annotated nor set to an `attr.ib` are **ignored**.
+
+ .. warning::
+ For features that use the attribute name to create decorators (e.g.
+ `validators <validators>`), you still *must* assign `attr.ib` to
+ them. Otherwise Python will either not find the name or try to use
+ the default value to call e.g. ``validator`` on it.
+
+            These errors can be quite confusing and are probably the most
+            common bug report on our bug tracker.
+
+ .. _`PEP 526`: https://www.python.org/dev/peps/pep-0526/
+ :param bool kw_only: Make all attributes keyword-only (Python 3+)
+ in the generated ``__init__`` (if ``init`` is ``False``, this
+ parameter is ignored).
+ :param bool cache_hash: Ensure that the object's hash code is computed
+ only once and stored on the object. If this is set to ``True``,
+ hashing must be either explicitly or implicitly enabled for this
+ class. If the hash code is cached, avoid any reassignments of
+ fields involved in hash code computation or mutations of the objects
+ those fields point to after object creation. If such changes occur,
+ the behavior of the object's hash code is undefined.
+ :param bool auto_exc: If the class subclasses `BaseException`
+ (which implicitly includes any subclass of any exception), the
+        following happens so that it behaves like a well-behaved Python
+        exception class:
+
+ - the values for *eq*, *order*, and *hash* are ignored and the
+ instances compare and hash by the instance's ids (N.B. ``attrs`` will
+ *not* remove existing implementations of ``__hash__`` or the equality
+ methods. It just won't add its own.),
+ - all attributes that are either passed into ``__init__`` or have a
+ default value are additionally available as a tuple in the ``args``
+ attribute,
+ - the value of *str* is ignored leaving ``__str__`` to base classes.
+ :param bool collect_by_mro: Setting this to `True` fixes the way ``attrs``
+ collects attributes from base classes. The default behavior is
+ incorrect in certain cases of multiple inheritance. It should be on by
+ default but is kept off for backward-compatibility.
+
+ See issue `#428 <https://github.com/python-attrs/attrs/issues/428>`_ for
+ more details.
+
+ :param Optional[bool] getstate_setstate:
+ .. note::
+ This is usually only interesting for slotted classes and you should
+ probably just set *auto_detect* to `True`.
+
+ If `True`, ``__getstate__`` and
+ ``__setstate__`` are generated and attached to the class. This is
+ necessary for slotted classes to be pickleable. If left `None`, it's
+ `True` by default for slotted classes and ``False`` for dict classes.
+
+ If *auto_detect* is `True`, and *getstate_setstate* is left `None`,
+ and **either** ``__getstate__`` or ``__setstate__`` is detected directly
+ on the class (i.e. not inherited), it is set to `False` (this is usually
+ what you want).
+
+ :param on_setattr: A callable that is run whenever the user attempts to set
+ an attribute (either by assignment like ``i.x = 42`` or by using
+ `setattr` like ``setattr(i, "x", 42)``). It receives the same arguments
+ as validators: the instance, the attribute that is being modified, and
+ the new value.
+
+ If no exception is raised, the attribute is set to the return value of
+ the callable.
+
+ If a list of callables is passed, they're automatically wrapped in an
+ `attrs.setters.pipe`.
+
+ :param Optional[callable] field_transformer:
+ A function that is called with the original class object and all
+ fields right before ``attrs`` finalizes the class. You can use
+ this, e.g., to automatically add converters or validators to
+ fields based on their types. See `transform-fields` for more details.
+
+ :param bool match_args:
+ If `True` (default), set ``__match_args__`` on the class to support
+ `PEP 634 <https://www.python.org/dev/peps/pep-0634/>`_ (Structural
+ Pattern Matching). It is a tuple of all non-keyword-only ``__init__``
+ parameter names on Python 3.10 and later. Ignored on older Python
+ versions.
+
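+ A minimal illustrative sketch of typical usage (``Point`` is a
+ hypothetical example class):
+
+ >>> import attr
+ >>> @attr.s(frozen=True, slots=True)
+ ... class Point(object):
+ ...     x = attr.ib()
+ ...     y = attr.ib(default=0)
+ >>> Point(1)
+ Point(x=1, y=0)
+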
+ .. versionadded:: 16.0.0 *slots*
+ .. versionadded:: 16.1.0 *frozen*
+ .. versionadded:: 16.3.0 *str*
+ .. versionadded:: 16.3.0 Support for ``__attrs_post_init__``.
+ .. versionchanged:: 17.1.0
+ *hash* supports ``None`` as value which is also the default now.
+ .. versionadded:: 17.3.0 *auto_attribs*
+ .. versionchanged:: 18.1.0
+ If *these* is passed, no attributes are deleted from the class body.
+ .. versionchanged:: 18.1.0 If *these* is ordered, the order is retained.
+ .. versionadded:: 18.2.0 *weakref_slot*
+ .. deprecated:: 18.2.0
+ ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` now raise a
+ `DeprecationWarning` if the classes compared are subclasses of
+ each other. ``__eq__`` and ``__ne__`` never tried to compare subclasses
+ to each other.
+ .. versionchanged:: 19.2.0
+ ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` now do not consider
+ subclasses comparable anymore.
+ .. versionadded:: 18.2.0 *kw_only*
+ .. versionadded:: 18.2.0 *cache_hash*
+ .. versionadded:: 19.1.0 *auto_exc*
+ .. deprecated:: 19.2.0 *cmp* Removal on or after 2021-06-01.
+ .. versionadded:: 19.2.0 *eq* and *order*
+ .. versionadded:: 20.1.0 *auto_detect*
+ .. versionadded:: 20.1.0 *collect_by_mro*
+ .. versionadded:: 20.1.0 *getstate_setstate*
+ .. versionadded:: 20.1.0 *on_setattr*
+ .. versionadded:: 20.3.0 *field_transformer*
+ .. versionchanged:: 21.1.0
+ ``init=False`` injects ``__attrs_init__``
+ .. versionchanged:: 21.1.0 Support for ``__attrs_pre_init__``
+ .. versionchanged:: 21.1.0 *cmp* undeprecated
+ .. versionadded:: 21.3.0 *match_args*
+ """
+ if auto_detect and PY2:
+ raise PythonTooOldError(
+ "auto_detect only works on Python 3 and later."
+ )
+
+ eq_, order_ = _determine_attrs_eq_order(cmp, eq, order, None)
+ hash_ = hash # work around the lack of nonlocal
+
+ if isinstance(on_setattr, (list, tuple)):
+ on_setattr = setters.pipe(*on_setattr)
+
+ def wrap(cls):
+
+ if getattr(cls, "__class__", None) is None:
+ raise TypeError("attrs only works with new-style classes.")
+
+ is_frozen = frozen or _has_frozen_base_class(cls)
+ is_exc = auto_exc is True and issubclass(cls, BaseException)
+ has_own_setattr = auto_detect and _has_own_attribute(
+ cls, "__setattr__"
+ )
+
+ if has_own_setattr and is_frozen:
+ raise ValueError("Can't freeze a class with a custom __setattr__.")
+
+ builder = _ClassBuilder(
+ cls,
+ these,
+ slots,
+ is_frozen,
+ weakref_slot,
+ _determine_whether_to_implement(
+ cls,
+ getstate_setstate,
+ auto_detect,
+ ("__getstate__", "__setstate__"),
+ default=slots,
+ ),
+ auto_attribs,
+ kw_only,
+ cache_hash,
+ is_exc,
+ collect_by_mro,
+ on_setattr,
+ has_own_setattr,
+ field_transformer,
+ )
+ if _determine_whether_to_implement(
+ cls, repr, auto_detect, ("__repr__",)
+ ):
+ builder.add_repr(repr_ns)
+ if str is True:
+ builder.add_str()
+
+ eq = _determine_whether_to_implement(
+ cls, eq_, auto_detect, ("__eq__", "__ne__")
+ )
+ if not is_exc and eq is True:
+ builder.add_eq()
+ if not is_exc and _determine_whether_to_implement(
+ cls, order_, auto_detect, ("__lt__", "__le__", "__gt__", "__ge__")
+ ):
+ builder.add_order()
+
+ builder.add_setattr()
+
+ if (
+ hash_ is None
+ and auto_detect is True
+ and _has_own_attribute(cls, "__hash__")
+ ):
+ hash = False
+ else:
+ hash = hash_
+ if hash is not True and hash is not False and hash is not None:
+ # Can't use `hash in` because 1 == True for example.
+ raise TypeError(
+ "Invalid value for hash. Must be True, False, or None."
+ )
+ elif hash is False or (hash is None and eq is False) or is_exc:
+ # Don't do anything. Should fall back to __object__'s __hash__
+ # which is by id.
+ if cache_hash:
+ raise TypeError(
+ "Invalid value for cache_hash. To use hash caching,"
+ " hashing must be either explicitly or implicitly "
+ "enabled."
+ )
+ elif hash is True or (
+ hash is None and eq is True and is_frozen is True
+ ):
+ # Build a __hash__ if told so, or if it's safe.
+ builder.add_hash()
+ else:
+ # Raise TypeError on attempts to hash.
+ if cache_hash:
+ raise TypeError(
+ "Invalid value for cache_hash. To use hash caching,"
+ " hashing must be either explicitly or implicitly "
+ "enabled."
+ )
+ builder.make_unhashable()
+
+ if _determine_whether_to_implement(
+ cls, init, auto_detect, ("__init__",)
+ ):
+ builder.add_init()
+ else:
+ builder.add_attrs_init()
+ if cache_hash:
+ raise TypeError(
+ "Invalid value for cache_hash. To use hash caching,"
+ " init must be True."
+ )
+
+ if (
+ PY310
+ and match_args
+ and not _has_own_attribute(cls, "__match_args__")
+ ):
+ builder.add_match_args()
+
+ return builder.build_class()
+
+ # maybe_cls's type depends on the usage of the decorator. It's a class
+ # if it's used as `@attrs` but ``None`` if used as `@attrs()`.
+ if maybe_cls is None:
+ return wrap
+ else:
+ return wrap(maybe_cls)
+
+
+_attrs = attrs
+"""
+Internal alias so we can use it in functions that take an argument called
+*attrs*.
+"""
+
+
+if PY2:
+
+ def _has_frozen_base_class(cls):
+ """
+ Check whether *cls* has a frozen ancestor by looking at its
+ __setattr__.
+ """
+ return (
+ getattr(cls.__setattr__, "__module__", None)
+ == _frozen_setattrs.__module__
+ and cls.__setattr__.__name__ == _frozen_setattrs.__name__
+ )
+
+else:
+
+ def _has_frozen_base_class(cls):
+ """
+ Check whether *cls* has a frozen ancestor by looking at its
+ __setattr__.
+ """
+ return cls.__setattr__ == _frozen_setattrs
+
+
+def _generate_unique_filename(cls, func_name):
+ """
+ Create a "filename" suitable for a function being generated.
+ """
+ unique_filename = "<attrs generated {0} {1}.{2}>".format(
+ func_name,
+ cls.__module__,
+ getattr(cls, "__qualname__", cls.__name__),
+ )
+ return unique_filename
+
+
+def _make_hash(cls, attrs, frozen, cache_hash):
+ attrs = tuple(
+ a for a in attrs if a.hash is True or (a.hash is None and a.eq is True)
+ )
+
+ tab = " "
+
+ unique_filename = _generate_unique_filename(cls, "hash")
+ type_hash = hash(unique_filename)
+
+ hash_def = "def __hash__(self"
+ hash_func = "hash(("
+ closing_braces = "))"
+ if not cache_hash:
+ hash_def += "):"
+ else:
+ if not PY2:
+ hash_def += ", *"
+
+ hash_def += (
+ ", _cache_wrapper="
+ + "__import__('attr._make')._make._CacheHashWrapper):"
+ )
+ hash_func = "_cache_wrapper(" + hash_func
+ closing_braces += ")"
+
+ method_lines = [hash_def]
+
+ def append_hash_computation_lines(prefix, indent):
+ """
+ Generate the code for actually computing the hash code.
+ Below this will either be returned directly or used to compute
+ a value which is then cached, depending on the value of cache_hash
+ """
+
+ method_lines.extend(
+ [
+ indent + prefix + hash_func,
+ indent + " %d," % (type_hash,),
+ ]
+ )
+
+ for a in attrs:
+ method_lines.append(indent + " self.%s," % a.name)
+
+ method_lines.append(indent + " " + closing_braces)
+
+ if cache_hash:
+ method_lines.append(tab + "if self.%s is None:" % _hash_cache_field)
+ if frozen:
+ append_hash_computation_lines(
+ "object.__setattr__(self, '%s', " % _hash_cache_field, tab * 2
+ )
+ method_lines.append(tab * 2 + ")") # close __setattr__
+ else:
+ append_hash_computation_lines(
+ "self.%s = " % _hash_cache_field, tab * 2
+ )
+ method_lines.append(tab + "return self.%s" % _hash_cache_field)
+ else:
+ append_hash_computation_lines("return ", tab)
+
+ script = "\n".join(method_lines)
+ return _make_method("__hash__", script, unique_filename)
+
+
+def _add_hash(cls, attrs):
+ """
+ Add a hash method to *cls*.
+ """
+ cls.__hash__ = _make_hash(cls, attrs, frozen=False, cache_hash=False)
+ return cls
+
+
+def _make_ne():
+ """
+ Create __ne__ method.
+ """
+
+ def __ne__(self, other):
+ """
+ Check equality and either forward a NotImplemented or
+ return the result negated.
+ """
+ result = self.__eq__(other)
+ if result is NotImplemented:
+ return NotImplemented
+
+ return not result
+
+ return __ne__
+
+
+def _make_eq(cls, attrs):
+ """
+ Create __eq__ method for *cls* with *attrs*.
+ """
+ attrs = [a for a in attrs if a.eq]
+
+ unique_filename = _generate_unique_filename(cls, "eq")
+ lines = [
+ "def __eq__(self, other):",
+ " if other.__class__ is not self.__class__:",
+ " return NotImplemented",
+ ]
+
+ # We can't just do a big self.x = other.x and... clause due to
+ # irregularities like nan == nan is false but (nan,) == (nan,) is true.
+ globs = {}
+ if attrs:
+ lines.append(" return (")
+ others = [" ) == ("]
+ for a in attrs:
+ if a.eq_key:
+ cmp_name = "_%s_key" % (a.name,)
+ # Add the key function to the global namespace
+ # of the evaluated function.
+ globs[cmp_name] = a.eq_key
+ lines.append(
+ " %s(self.%s),"
+ % (
+ cmp_name,
+ a.name,
+ )
+ )
+ others.append(
+ " %s(other.%s),"
+ % (
+ cmp_name,
+ a.name,
+ )
+ )
+ else:
+ lines.append(" self.%s," % (a.name,))
+ others.append(" other.%s," % (a.name,))
+
+ lines += others + [" )"]
+ else:
+ lines.append(" return True")
+
+ script = "\n".join(lines)
+
+ return _make_method("__eq__", script, unique_filename, globs)
+
+
+def _make_order(cls, attrs):
+ """
+ Create ordering methods for *cls* with *attrs*.
+ """
+ attrs = [a for a in attrs if a.order]
+
+ def attrs_to_tuple(obj):
+ """
+ Save us some typing.
+ """
+ return tuple(
+ key(value) if key else value
+ for value, key in (
+ (getattr(obj, a.name), a.order_key) for a in attrs
+ )
+ )
+
+ def __lt__(self, other):
+ """
+ Automatically created by attrs.
+ """
+ if other.__class__ is self.__class__:
+ return attrs_to_tuple(self) < attrs_to_tuple(other)
+
+ return NotImplemented
+
+ def __le__(self, other):
+ """
+ Automatically created by attrs.
+ """
+ if other.__class__ is self.__class__:
+ return attrs_to_tuple(self) <= attrs_to_tuple(other)
+
+ return NotImplemented
+
+ def __gt__(self, other):
+ """
+ Automatically created by attrs.
+ """
+ if other.__class__ is self.__class__:
+ return attrs_to_tuple(self) > attrs_to_tuple(other)
+
+ return NotImplemented
+
+ def __ge__(self, other):
+ """
+ Automatically created by attrs.
+ """
+ if other.__class__ is self.__class__:
+ return attrs_to_tuple(self) >= attrs_to_tuple(other)
+
+ return NotImplemented
+
+ return __lt__, __le__, __gt__, __ge__
+
+
+def _add_eq(cls, attrs=None):
+ """
+ Add equality methods to *cls* with *attrs*.
+ """
+ if attrs is None:
+ attrs = cls.__attrs_attrs__
+
+ cls.__eq__ = _make_eq(cls, attrs)
+ cls.__ne__ = _make_ne()
+
+ return cls
+
+
+if HAS_F_STRINGS:
+
+ def _make_repr(attrs, ns, cls):
+ unique_filename = _generate_unique_filename(cls, "repr")
+ # Figure out which attributes to include, and which function to use to
+ # format them. The a.repr value can be either bool or a custom
+ # callable.
+ attr_names_with_reprs = tuple(
+ (a.name, (repr if a.repr is True else a.repr), a.init)
+ for a in attrs
+ if a.repr is not False
+ )
+ globs = {
+ name + "_repr": r
+ for name, r, _ in attr_names_with_reprs
+ if r != repr
+ }
+ globs["_compat"] = _compat
+ globs["AttributeError"] = AttributeError
+ globs["NOTHING"] = NOTHING
+ attribute_fragments = []
+ for name, r, i in attr_names_with_reprs:
+ accessor = (
+ "self." + name
+ if i
+ else 'getattr(self, "' + name + '", NOTHING)'
+ )
+ fragment = (
+ "%s={%s!r}" % (name, accessor)
+ if r == repr
+ else "%s={%s_repr(%s)}" % (name, name, accessor)
+ )
+ attribute_fragments.append(fragment)
+ repr_fragment = ", ".join(attribute_fragments)
+
+ if ns is None:
+ cls_name_fragment = (
+ '{self.__class__.__qualname__.rsplit(">.", 1)[-1]}'
+ )
+ else:
+ cls_name_fragment = ns + ".{self.__class__.__name__}"
+
+ lines = [
+ "def __repr__(self):",
+ " try:",
+ " already_repring = _compat.repr_context.already_repring",
+ " except AttributeError:",
+ " already_repring = {id(self),}",
+ " _compat.repr_context.already_repring = already_repring",
+ " else:",
+ " if id(self) in already_repring:",
+ " return '...'",
+ " else:",
+ " already_repring.add(id(self))",
+ " try:",
+ " return f'%s(%s)'" % (cls_name_fragment, repr_fragment),
+ " finally:",
+ " already_repring.remove(id(self))",
+ ]
+
+ return _make_method(
+ "__repr__", "\n".join(lines), unique_filename, globs=globs
+ )
+
+else:
+
+ def _make_repr(attrs, ns, _):
+ """
+ Make a repr method that includes relevant *attrs*, adding *ns* to the
+ full name.
+ """
+
+ # Figure out which attributes to include, and which function to use to
+ # format them. The a.repr value can be either bool or a custom
+ # callable.
+ attr_names_with_reprs = tuple(
+ (a.name, repr if a.repr is True else a.repr)
+ for a in attrs
+ if a.repr is not False
+ )
+
+ def __repr__(self):
+ """
+ Automatically created by attrs.
+ """
+ try:
+ already_repring = _compat.repr_context.already_repring
+ except AttributeError:
+ already_repring = set()
+ _compat.repr_context.already_repring = already_repring
+
+ if id(self) in already_repring:
+ return "..."
+ real_cls = self.__class__
+ if ns is None:
+ qualname = getattr(real_cls, "__qualname__", None)
+ if qualname is not None: # pragma: no cover
+ # This case only happens on Python 3.5 and 3.6. We exclude
+ # it from coverage, because we don't want to slow down our
+ # test suite by running them under coverage too for this
+ # one line.
+ class_name = qualname.rsplit(">.", 1)[-1]
+ else:
+ class_name = real_cls.__name__
+ else:
+ class_name = ns + "." + real_cls.__name__
+
+ # Since 'self' remains on the stack (i.e.: strongly referenced)
+ # for the duration of this call, it's safe to depend on id(...)
+ # stability, and not need to track the instance and therefore
+ # worry about properties like weakref- or hash-ability.
+ already_repring.add(id(self))
+ try:
+ result = [class_name, "("]
+ first = True
+ for name, attr_repr in attr_names_with_reprs:
+ if first:
+ first = False
+ else:
+ result.append(", ")
+ result.extend(
+ (name, "=", attr_repr(getattr(self, name, NOTHING)))
+ )
+ return "".join(result) + ")"
+ finally:
+ already_repring.remove(id(self))
+
+ return __repr__
+
+
+def _add_repr(cls, ns=None, attrs=None):
+ """
+ Add a repr method to *cls*.
+ """
+ if attrs is None:
+ attrs = cls.__attrs_attrs__
+
+ cls.__repr__ = _make_repr(attrs, ns, cls)
+ return cls
+
+
+def fields(cls):
+ """
+ Return the tuple of ``attrs`` attributes for a class.
+
+ The tuple also allows accessing the fields by their names (see below for
+ examples).
+
+ :param type cls: Class to introspect.
+
+ :raise TypeError: If *cls* is not a class.
+ :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
+ class.
+
+ :rtype: tuple (with name accessors) of `attrs.Attribute`
+
+ .. versionchanged:: 16.2.0 Returned tuple allows accessing the fields
+ by name.
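+
+ A short illustrative example (``C`` is a hypothetical class):
+
+ >>> import attr
+ >>> @attr.s
+ ... class C(object):
+ ...     x = attr.ib()
+ ...     y = attr.ib()
+ >>> fields(C).x.name
+ 'x'
+ >>> fields(C)[1] is fields(C).y
+ True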
+ """
+ if not isclass(cls):
+ raise TypeError("Passed object must be a class.")
+ attrs = getattr(cls, "__attrs_attrs__", None)
+ if attrs is None:
+ raise NotAnAttrsClassError(
+ "{cls!r} is not an attrs-decorated class.".format(cls=cls)
+ )
+ return attrs
+
+
+def fields_dict(cls):
+ """
+ Return an ordered dictionary of ``attrs`` attributes for a class, whose
+ keys are the attribute names.
+
+ :param type cls: Class to introspect.
+
+ :raise TypeError: If *cls* is not a class.
+ :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
+ class.
+
+ :rtype: an ordered dict where keys are attribute names and values are
+ `attrs.Attribute`\\ s. This will be a `dict` if it's
+ naturally ordered like on Python 3.6+ or an
+ :class:`~collections.OrderedDict` otherwise.
+
+ .. versionadded:: 18.1.0
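+
+ A short illustrative example (``C`` is a hypothetical class):
+
+ >>> import attr
+ >>> @attr.s
+ ... class C(object):
+ ...     x = attr.ib()
+ >>> list(fields_dict(C))
+ ['x']
+ >>> fields_dict(C)['x'] is fields(C).x
+ True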
+ """
+ if not isclass(cls):
+ raise TypeError("Passed object must be a class.")
+ attrs = getattr(cls, "__attrs_attrs__", None)
+ if attrs is None:
+ raise NotAnAttrsClassError(
+ "{cls!r} is not an attrs-decorated class.".format(cls=cls)
+ )
+ return ordered_dict(((a.name, a) for a in attrs))
+
+
+def validate(inst):
+ """
+ Validate all attributes on *inst* that have a validator.
+
+ Lets all exceptions through.
+
+ :param inst: Instance of a class with ``attrs`` attributes.
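+
+ An illustrative sketch (``C`` is a hypothetical class; the last call
+ raises the validator's ``TypeError``):
+
+ >>> import attr
+ >>> @attr.s
+ ... class C(object):
+ ...     x = attr.ib(validator=attr.validators.instance_of(int))
+ >>> i = C(42)
+ >>> validate(i)          # all validators pass
+ >>> i.x = "not an int"   # plain ``attr.s`` does not validate on setattr
+ >>> validate(i)          # raises TypeError from instance_of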
+ """
+ if _config._run_validators is False:
+ return
+
+ for a in fields(inst.__class__):
+ v = a.validator
+ if v is not None:
+ v(inst, a, getattr(inst, a.name))
+
+
+def _is_slot_cls(cls):
+ return "__slots__" in cls.__dict__
+
+
+def _is_slot_attr(a_name, base_attr_map):
+ """
+ Check if the attribute name comes from a slot class.
+ """
+ return a_name in base_attr_map and _is_slot_cls(base_attr_map[a_name])
+
+
+def _make_init(
+ cls,
+ attrs,
+ pre_init,
+ post_init,
+ frozen,
+ slots,
+ cache_hash,
+ base_attr_map,
+ is_exc,
+ cls_on_setattr,
+ attrs_init,
+):
+ has_cls_on_setattr = (
+ cls_on_setattr is not None and cls_on_setattr is not setters.NO_OP
+ )
+
+ if frozen and has_cls_on_setattr:
+ raise ValueError("Frozen classes can't use on_setattr.")
+
+ needs_cached_setattr = cache_hash or frozen
+ filtered_attrs = []
+ attr_dict = {}
+ for a in attrs:
+ if not a.init and a.default is NOTHING:
+ continue
+
+ filtered_attrs.append(a)
+ attr_dict[a.name] = a
+
+ if a.on_setattr is not None:
+ if frozen is True:
+ raise ValueError("Frozen classes can't use on_setattr.")
+
+ needs_cached_setattr = True
+ elif has_cls_on_setattr and a.on_setattr is not setters.NO_OP:
+ needs_cached_setattr = True
+
+ unique_filename = _generate_unique_filename(cls, "init")
+
+ script, globs, annotations = _attrs_to_init_script(
+ filtered_attrs,
+ frozen,
+ slots,
+ pre_init,
+ post_init,
+ cache_hash,
+ base_attr_map,
+ is_exc,
+ needs_cached_setattr,
+ has_cls_on_setattr,
+ attrs_init,
+ )
+ if cls.__module__ in sys.modules:
+ # This makes typing.get_type_hints(CLS.__init__) resolve string types.
+ globs.update(sys.modules[cls.__module__].__dict__)
+
+ globs.update({"NOTHING": NOTHING, "attr_dict": attr_dict})
+
+ if needs_cached_setattr:
+ # Save the lookup overhead in __init__ if we need to circumvent
+ # setattr hooks.
+ globs["_cached_setattr"] = _obj_setattr
+
+ init = _make_method(
+ "__attrs_init__" if attrs_init else "__init__",
+ script,
+ unique_filename,
+ globs,
+ )
+ init.__annotations__ = annotations
+
+ return init
+
+
+def _setattr(attr_name, value_var, has_on_setattr):
+ """
+ Use the cached object.setattr to set *attr_name* to *value_var*.
+ """
+ return "_setattr('%s', %s)" % (attr_name, value_var)
+
+
+def _setattr_with_converter(attr_name, value_var, has_on_setattr):
+ """
+ Use the cached object.setattr to set *attr_name* to *value_var*, but run
+ its converter first.
+ """
+ return "_setattr('%s', %s(%s))" % (
+ attr_name,
+ _init_converter_pat % (attr_name,),
+ value_var,
+ )
+
+
+def _assign(attr_name, value, has_on_setattr):
+ """
+ Unless *attr_name* has an on_setattr hook, use normal assignment. Otherwise
+ delegate to _setattr.
+ """
+ if has_on_setattr:
+ return _setattr(attr_name, value, True)
+
+ return "self.%s = %s" % (attr_name, value)
+
+
+def _assign_with_converter(attr_name, value_var, has_on_setattr):
+ """
+ Unless *attr_name* has an on_setattr hook, use normal assignment after
+ conversion. Otherwise delegate to _setattr_with_converter.
+ """
+ if has_on_setattr:
+ return _setattr_with_converter(attr_name, value_var, True)
+
+ return "self.%s = %s(%s)" % (
+ attr_name,
+ _init_converter_pat % (attr_name,),
+ value_var,
+ )
+
+
+if PY2:
+
+ def _unpack_kw_only_py2(attr_name, default=None):
+ """
+ Unpack *attr_name* from _kw_only dict.
+ """
+ if default is not None:
+ arg_default = ", %s" % default
+ else:
+ arg_default = ""
+ return "%s = _kw_only.pop('%s'%s)" % (
+ attr_name,
+ attr_name,
+ arg_default,
+ )
+
+ def _unpack_kw_only_lines_py2(kw_only_args):
+ """
+ Unpack all *kw_only_args* from _kw_only dict and handle errors.
+
+ Given a list of strings "{attr_name}" and "{attr_name}={default}",
+ generates a list of lines of code that pop attrs from the _kw_only dict
+ and raise a TypeError similar to the builtin one if a required attr is
+ missing or an extra key is passed.
+
+ >>> print("\n".join(_unpack_kw_only_lines_py2(["a", "b=42"])))
+ try:
+ a = _kw_only.pop('a')
+ b = _kw_only.pop('b', 42)
+ except KeyError as _key_error:
+ raise TypeError(
+ ...
+ if _kw_only:
+ raise TypeError(
+ ...
+ """
+ lines = ["try:"]
+ lines.extend(
+ " " + _unpack_kw_only_py2(*arg.split("="))
+ for arg in kw_only_args
+ )
+ lines += """\
+except KeyError as _key_error:
+ raise TypeError(
+ '__init__() missing required keyword-only argument: %s' % _key_error
+ )
+if _kw_only:
+ raise TypeError(
+ '__init__() got an unexpected keyword argument %r'
+ % next(iter(_kw_only))
+ )
+""".split(
+ "\n"
+ )
+ return lines
+
+
+def _attrs_to_init_script(
+ attrs,
+ frozen,
+ slots,
+ pre_init,
+ post_init,
+ cache_hash,
+ base_attr_map,
+ is_exc,
+ needs_cached_setattr,
+ has_cls_on_setattr,
+ attrs_init,
+):
+ """
+ Return a script of an initializer for *attrs* and a dict of globals.
+
+ The globals are expected by the generated script.
+
+ If *frozen* is True, we cannot set the attributes directly so we use
+ a cached ``object.__setattr__``.
+ """
+ lines = []
+ if pre_init:
+ lines.append("self.__attrs_pre_init__()")
+
+ if needs_cached_setattr:
+ lines.append(
+ # Circumvent the __setattr__ descriptor to save one lookup per
+ # assignment.
+ # Note _setattr will be used again below if cache_hash is True
+ "_setattr = _cached_setattr.__get__(self, self.__class__)"
+ )
+
+ if frozen is True:
+ if slots is True:
+ fmt_setter = _setattr
+ fmt_setter_with_converter = _setattr_with_converter
+ else:
+ # Dict frozen classes assign directly to __dict__.
+ # But only if the attribute doesn't come from an ancestor slot
+ # class.
+ # Note _inst_dict will be used again below if cache_hash is True
+ lines.append("_inst_dict = self.__dict__")
+
+ def fmt_setter(attr_name, value_var, has_on_setattr):
+ if _is_slot_attr(attr_name, base_attr_map):
+ return _setattr(attr_name, value_var, has_on_setattr)
+
+ return "_inst_dict['%s'] = %s" % (attr_name, value_var)
+
+ def fmt_setter_with_converter(
+ attr_name, value_var, has_on_setattr
+ ):
+ if has_on_setattr or _is_slot_attr(attr_name, base_attr_map):
+ return _setattr_with_converter(
+ attr_name, value_var, has_on_setattr
+ )
+
+ return "_inst_dict['%s'] = %s(%s)" % (
+ attr_name,
+ _init_converter_pat % (attr_name,),
+ value_var,
+ )
+
+ else:
+ # Not frozen.
+ fmt_setter = _assign
+ fmt_setter_with_converter = _assign_with_converter
+
+ args = []
+ kw_only_args = []
+ attrs_to_validate = []
+
+ # This is a dictionary of names to validator and converter callables.
+ # Injecting this into __init__ globals lets us avoid lookups.
+ names_for_globals = {}
+ annotations = {"return": None}
+
+ for a in attrs:
+ if a.validator:
+ attrs_to_validate.append(a)
+
+ attr_name = a.name
+ has_on_setattr = a.on_setattr is not None or (
+ a.on_setattr is not setters.NO_OP and has_cls_on_setattr
+ )
+ arg_name = a.name.lstrip("_")
+
+ has_factory = isinstance(a.default, Factory)
+ if has_factory and a.default.takes_self:
+ maybe_self = "self"
+ else:
+ maybe_self = ""
+
+ if a.init is False:
+ if has_factory:
+ init_factory_name = _init_factory_pat.format(a.name)
+ if a.converter is not None:
+ lines.append(
+ fmt_setter_with_converter(
+ attr_name,
+ init_factory_name + "(%s)" % (maybe_self,),
+ has_on_setattr,
+ )
+ )
+ conv_name = _init_converter_pat % (a.name,)
+ names_for_globals[conv_name] = a.converter
+ else:
+ lines.append(
+ fmt_setter(
+ attr_name,
+ init_factory_name + "(%s)" % (maybe_self,),
+ has_on_setattr,
+ )
+ )
+ names_for_globals[init_factory_name] = a.default.factory
+ else:
+ if a.converter is not None:
+ lines.append(
+ fmt_setter_with_converter(
+ attr_name,
+ "attr_dict['%s'].default" % (attr_name,),
+ has_on_setattr,
+ )
+ )
+ conv_name = _init_converter_pat % (a.name,)
+ names_for_globals[conv_name] = a.converter
+ else:
+ lines.append(
+ fmt_setter(
+ attr_name,
+ "attr_dict['%s'].default" % (attr_name,),
+ has_on_setattr,
+ )
+ )
+ elif a.default is not NOTHING and not has_factory:
+ arg = "%s=attr_dict['%s'].default" % (arg_name, attr_name)
+ if a.kw_only:
+ kw_only_args.append(arg)
+ else:
+ args.append(arg)
+
+ if a.converter is not None:
+ lines.append(
+ fmt_setter_with_converter(
+ attr_name, arg_name, has_on_setattr
+ )
+ )
+ names_for_globals[
+ _init_converter_pat % (a.name,)
+ ] = a.converter
+ else:
+ lines.append(fmt_setter(attr_name, arg_name, has_on_setattr))
+
+ elif has_factory:
+ arg = "%s=NOTHING" % (arg_name,)
+ if a.kw_only:
+ kw_only_args.append(arg)
+ else:
+ args.append(arg)
+ lines.append("if %s is not NOTHING:" % (arg_name,))
+
+ init_factory_name = _init_factory_pat.format(a.name)
+ if a.converter is not None:
+ lines.append(
+ " "
+ + fmt_setter_with_converter(
+ attr_name, arg_name, has_on_setattr
+ )
+ )
+ lines.append("else:")
+ lines.append(
+ " "
+ + fmt_setter_with_converter(
+ attr_name,
+ init_factory_name + "(" + maybe_self + ")",
+ has_on_setattr,
+ )
+ )
+ names_for_globals[
+ _init_converter_pat % (a.name,)
+ ] = a.converter
+ else:
+ lines.append(
+ " " + fmt_setter(attr_name, arg_name, has_on_setattr)
+ )
+ lines.append("else:")
+ lines.append(
+ " "
+ + fmt_setter(
+ attr_name,
+ init_factory_name + "(" + maybe_self + ")",
+ has_on_setattr,
+ )
+ )
+ names_for_globals[init_factory_name] = a.default.factory
+ else:
+ if a.kw_only:
+ kw_only_args.append(arg_name)
+ else:
+ args.append(arg_name)
+
+ if a.converter is not None:
+ lines.append(
+ fmt_setter_with_converter(
+ attr_name, arg_name, has_on_setattr
+ )
+ )
+ names_for_globals[
+ _init_converter_pat % (a.name,)
+ ] = a.converter
+ else:
+ lines.append(fmt_setter(attr_name, arg_name, has_on_setattr))
+
+ if a.init is True:
+ if a.type is not None and a.converter is None:
+ annotations[arg_name] = a.type
+ elif a.converter is not None and not PY2:
+ # Try to get the type from the converter.
+ sig = None
+ try:
+ sig = inspect.signature(a.converter)
+ except (ValueError, TypeError): # inspect failed
+ pass
+ if sig:
+ sig_params = list(sig.parameters.values())
+ if (
+ sig_params
+ and sig_params[0].annotation
+ is not inspect.Parameter.empty
+ ):
+ annotations[arg_name] = sig_params[0].annotation
+
+ if attrs_to_validate: # we can skip this if there are no validators.
+ names_for_globals["_config"] = _config
+ lines.append("if _config._run_validators is True:")
+ for a in attrs_to_validate:
+ val_name = "__attr_validator_" + a.name
+ attr_name = "__attr_" + a.name
+ lines.append(
+ " %s(self, %s, self.%s)" % (val_name, attr_name, a.name)
+ )
+ names_for_globals[val_name] = a.validator
+ names_for_globals[attr_name] = a
+
+ if post_init:
+ lines.append("self.__attrs_post_init__()")
+
+ # because this is set only after __attrs_post_init is called, a crash
+ # will result if post-init tries to access the hash code. This seemed
+ # preferable to setting this beforehand, in which case alteration to
+ # field values during post-init combined with post-init accessing the
+ # hash code would result in silent bugs.
+ if cache_hash:
+ if frozen:
+ if slots:
+ # if frozen and slots, then _setattr defined above
+ init_hash_cache = "_setattr('%s', %s)"
+ else:
+ # if frozen and not slots, then _inst_dict defined above
+ init_hash_cache = "_inst_dict['%s'] = %s"
+ else:
+ init_hash_cache = "self.%s = %s"
+ lines.append(init_hash_cache % (_hash_cache_field, "None"))
+
+ # For exceptions we rely on BaseException.__init__ for proper
+ # initialization.
+ if is_exc:
+ vals = ",".join("self." + a.name for a in attrs if a.init)
+
+ lines.append("BaseException.__init__(self, %s)" % (vals,))
+
+ args = ", ".join(args)
+ if kw_only_args:
+ if PY2:
+ lines = _unpack_kw_only_lines_py2(kw_only_args) + lines
+
+ args += "%s**_kw_only" % (", " if args else "",) # leading comma
+ else:
+ args += "%s*, %s" % (
+ ", " if args else "", # leading comma
+ ", ".join(kw_only_args), # kw_only args
+ )
+ return (
+ """\
+def {init_name}(self, {args}):
+ {lines}
+""".format(
+ init_name=("__attrs_init__" if attrs_init else "__init__"),
+ args=args,
+ lines="\n ".join(lines) if lines else "pass",
+ ),
+ names_for_globals,
+ annotations,
+ )
+
+
+class Attribute(object):
+ """
+ *Read-only* representation of an attribute.
+
+ The class has *all* arguments of `attr.ib` (except for ``factory``,
+ which is only syntactic sugar for ``default=Factory(...)``) plus the
+ following:
+
+ - ``name`` (`str`): The name of the attribute.
+ - ``inherited`` (`bool`): Whether or not that attribute has been inherited
+ from a base class.
+ - ``eq_key`` and ``order_key`` (`typing.Callable` or `None`): The callables
+ that are used for comparing and ordering objects by this attribute,
+ respectively. These are set by passing a callable to `attr.ib`'s ``eq``,
+ ``order``, or ``cmp`` arguments. See also :ref:`comparison customization
+ <custom-comparison>`.
+
+ Instances of this class are frequently used for introspection purposes
+ like:
+
+ - `fields` returns a tuple of them.
+ - Validators get them passed as the first argument.
+ - The :ref:`field transformer <transform-fields>` hook receives a list of
+ them.
+
+ .. versionadded:: 20.1.0 *inherited*
+ .. versionadded:: 20.1.0 *on_setattr*
+ .. versionchanged:: 20.2.0 *inherited* is not taken into account for
+ equality checks and hashing anymore.
+ .. versionadded:: 21.1.0 *eq_key* and *order_key*
+
+ For the full version history of the fields, see `attr.ib`.
+ """
+
+ __slots__ = (
+ "name",
+ "default",
+ "validator",
+ "repr",
+ "eq",
+ "eq_key",
+ "order",
+ "order_key",
+ "hash",
+ "init",
+ "metadata",
+ "type",
+ "converter",
+ "kw_only",
+ "inherited",
+ "on_setattr",
+ )
+
+ def __init__(
+ self,
+ name,
+ default,
+ validator,
+ repr,
+ cmp, # XXX: unused, remove along with other cmp code.
+ hash,
+ init,
+ inherited,
+ metadata=None,
+ type=None,
+ converter=None,
+ kw_only=False,
+ eq=None,
+ eq_key=None,
+ order=None,
+ order_key=None,
+ on_setattr=None,
+ ):
+ eq, eq_key, order, order_key = _determine_attrib_eq_order(
+ cmp, eq_key or eq, order_key or order, True
+ )
+
+ # Cache this descriptor here to speed things up later.
+ bound_setattr = _obj_setattr.__get__(self, Attribute)
+
+ # Despite the big red warning, people *do* instantiate `Attribute`
+ # themselves.
+ bound_setattr("name", name)
+ bound_setattr("default", default)
+ bound_setattr("validator", validator)
+ bound_setattr("repr", repr)
+ bound_setattr("eq", eq)
+ bound_setattr("eq_key", eq_key)
+ bound_setattr("order", order)
+ bound_setattr("order_key", order_key)
+ bound_setattr("hash", hash)
+ bound_setattr("init", init)
+ bound_setattr("converter", converter)
+ bound_setattr(
+ "metadata",
+ (
+ metadata_proxy(metadata)
+ if metadata
+ else _empty_metadata_singleton
+ ),
+ )
+ bound_setattr("type", type)
+ bound_setattr("kw_only", kw_only)
+ bound_setattr("inherited", inherited)
+ bound_setattr("on_setattr", on_setattr)
+
+ def __setattr__(self, name, value):
+ raise FrozenInstanceError()
+
+ @classmethod
+ def from_counting_attr(cls, name, ca, type=None):
+ # type holds the annotated value. deal with conflicts:
+ if type is None:
+ type = ca.type
+ elif ca.type is not None:
+ raise ValueError(
+ "Type annotation and type argument cannot both be present"
+ )
+ inst_dict = {
+ k: getattr(ca, k)
+ for k in Attribute.__slots__
+ if k
+ not in (
+ "name",
+ "validator",
+ "default",
+ "type",
+ "inherited",
+ ) # exclude methods and deprecated alias
+ }
+ return cls(
+ name=name,
+ validator=ca._validator,
+ default=ca._default,
+ type=type,
+ cmp=None,
+ inherited=False,
+ **inst_dict
+ )
+
+ @property
+ def cmp(self):
+ """
+ Simulate the presence of a cmp attribute and warn.
+ """
+ warnings.warn(_CMP_DEPRECATION, DeprecationWarning, stacklevel=2)
+
+ return self.eq and self.order
+
+ # Don't use attr.evolve since fields(Attribute) doesn't work
+ def evolve(self, **changes):
+ """
+ Copy *self* and apply *changes*.
+
+ This works similarly to `attr.evolve` but that function does not work
+ with ``Attribute``.
+
+ It is mainly meant to be used for `transform-fields`.
+
+ .. versionadded:: 20.3.0
+ """
+ new = copy.copy(self)
+
+ new._setattrs(changes.items())
+
+ return new
+
+ # Don't use _add_pickle since fields(Attribute) doesn't work
+ def __getstate__(self):
+ """
+ Play nice with pickle.
+ """
+ return tuple(
+ getattr(self, name) if name != "metadata" else dict(self.metadata)
+ for name in self.__slots__
+ )
+
+ def __setstate__(self, state):
+ """
+ Play nice with pickle.
+ """
+ self._setattrs(zip(self.__slots__, state))
+
+ def _setattrs(self, name_values_pairs):
+ bound_setattr = _obj_setattr.__get__(self, Attribute)
+ for name, value in name_values_pairs:
+ if name != "metadata":
+ bound_setattr(name, value)
+ else:
+ bound_setattr(
+ name,
+ metadata_proxy(value)
+ if value
+ else _empty_metadata_singleton,
+ )
+
+
+_a = [
+ Attribute(
+ name=name,
+ default=NOTHING,
+ validator=None,
+ repr=True,
+ cmp=None,
+ eq=True,
+ order=False,
+ hash=(name != "metadata"),
+ init=True,
+ inherited=False,
+ )
+ for name in Attribute.__slots__
+]
+
+Attribute = _add_hash(
+ _add_eq(
+ _add_repr(Attribute, attrs=_a),
+ attrs=[a for a in _a if a.name != "inherited"],
+ ),
+ attrs=[a for a in _a if a.hash and a.name != "inherited"],
+)
+
+
+class _CountingAttr(object):
+ """
+ Intermediate representation of attributes that uses a counter to preserve
+ the order in which the attributes have been defined.
+
+ *Internal* data structure of the attrs library. Running into it is
+ most likely the result of a bug like a forgotten `@attr.s` decorator.
+ """
+
+ __slots__ = (
+ "counter",
+ "_default",
+ "repr",
+ "eq",
+ "eq_key",
+ "order",
+ "order_key",
+ "hash",
+ "init",
+ "metadata",
+ "_validator",
+ "converter",
+ "type",
+ "kw_only",
+ "on_setattr",
+ )
+ __attrs_attrs__ = tuple(
+ Attribute(
+ name=name,
+ default=NOTHING,
+ validator=None,
+ repr=True,
+ cmp=None,
+ hash=True,
+ init=True,
+ kw_only=False,
+ eq=True,
+ eq_key=None,
+ order=False,
+ order_key=None,
+ inherited=False,
+ on_setattr=None,
+ )
+ for name in (
+ "counter",
+ "_default",
+ "repr",
+ "eq",
+ "order",
+ "hash",
+ "init",
+ "on_setattr",
+ )
+ ) + (
+ Attribute(
+ name="metadata",
+ default=None,
+ validator=None,
+ repr=True,
+ cmp=None,
+ hash=False,
+ init=True,
+ kw_only=False,
+ eq=True,
+ eq_key=None,
+ order=False,
+ order_key=None,
+ inherited=False,
+ on_setattr=None,
+ ),
+ )
+ cls_counter = 0
+
+ def __init__(
+ self,
+ default,
+ validator,
+ repr,
+ cmp,
+ hash,
+ init,
+ converter,
+ metadata,
+ type,
+ kw_only,
+ eq,
+ eq_key,
+ order,
+ order_key,
+ on_setattr,
+ ):
+ _CountingAttr.cls_counter += 1
+ self.counter = _CountingAttr.cls_counter
+ self._default = default
+ self._validator = validator
+ self.converter = converter
+ self.repr = repr
+ self.eq = eq
+ self.eq_key = eq_key
+ self.order = order
+ self.order_key = order_key
+ self.hash = hash
+ self.init = init
+ self.metadata = metadata
+ self.type = type
+ self.kw_only = kw_only
+ self.on_setattr = on_setattr
+
+ def validator(self, meth):
+ """
+ Decorator that adds *meth* to the list of validators.
+
+ Returns *meth* unchanged.
+
+ .. versionadded:: 17.1.0
+ """
+ if self._validator is None:
+ self._validator = meth
+ else:
+ self._validator = and_(self._validator, meth)
+ return meth
+
+ def default(self, meth):
+ """
+ Decorator that allows setting the default for an attribute.
+
+ Returns *meth* unchanged.
+
+ :raises DefaultAlreadySetError: If default has been set before.
+
+ .. versionadded:: 17.1.0
+ """
+ if self._default is not NOTHING:
+ raise DefaultAlreadySetError()
+
+ self._default = Factory(meth, takes_self=True)
+
+ return meth
+
+
+_CountingAttr = _add_eq(_add_repr(_CountingAttr))
+
+
+class Factory(object):
+ """
+ Stores a factory callable.
+
+ If passed as the default value to `attrs.field`, the factory is used to
+ generate a new value.
+
+ :param callable factory: A callable that takes either no arguments or
+ exactly one mandatory positional argument, depending on *takes_self*.
+ :param bool takes_self: Pass the partially initialized instance that is
+ being initialized as a positional argument.
+
+ .. versionadded:: 17.1.0 *takes_self*
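+
+ An illustrative sketch (``C`` is a hypothetical class):
+
+ >>> import attr
+ >>> @attr.s
+ ... class C(object):
+ ...     x = attr.ib(default=Factory(list))
+ >>> C()
+ C(x=[])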
+ """
+
+ __slots__ = ("factory", "takes_self")
+
+ def __init__(self, factory, takes_self=False):
+ """
+ `Factory` is part of the default machinery so if we want a default
+ value here, we have to implement it ourselves.
+ """
+ self.factory = factory
+ self.takes_self = takes_self
+
+ def __getstate__(self):
+ """
+ Play nice with pickle.
+ """
+ return tuple(getattr(self, name) for name in self.__slots__)
+
+ def __setstate__(self, state):
+ """
+ Play nice with pickle.
+ """
+ for name, value in zip(self.__slots__, state):
+ setattr(self, name, value)
+
+
+_f = [
+ Attribute(
+ name=name,
+ default=NOTHING,
+ validator=None,
+ repr=True,
+ cmp=None,
+ eq=True,
+ order=False,
+ hash=True,
+ init=True,
+ inherited=False,
+ )
+ for name in Factory.__slots__
+]
+
+Factory = _add_hash(_add_eq(_add_repr(Factory, attrs=_f), attrs=_f), attrs=_f)
+
+
+def make_class(name, attrs, bases=(object,), **attributes_arguments):
+ """
+ A quick way to create a new class called *name* with *attrs*.
+
+ :param str name: The name for the new class.
+
+ :param attrs: A list of names or a dictionary mapping names to
+ attributes.
+
+ If *attrs* is a list or an ordered dict (`dict` on Python 3.6+,
+ `collections.OrderedDict` otherwise), the order is deduced from
+ the order of the names or attributes inside *attrs*. Otherwise the
+ order of the definition of the attributes is used.
+ :type attrs: `list` or `dict`
+
+ :param tuple bases: Classes that the new class will subclass.
+
+ :param attributes_arguments: Passed unmodified to `attr.s`.
+
+ :return: A new class with *attrs*.
+ :rtype: type
+
+ .. versionadded:: 17.1.0 *bases*
+ .. versionchanged:: 18.1.0 If *attrs* is ordered, the order is retained.
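+
+ Illustrative usage (class names are arbitrary):
+
+ >>> C1 = make_class("C1", ["x", "y"])
+ >>> C1("a", "b")
+ C1(x='a', y='b')
+ >>> C2 = make_class("C2", {"x": attrib(default=42)})
+ >>> C2()
+ C2(x=42)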
+ """
+ if isinstance(attrs, dict):
+ cls_dict = attrs
+ elif isinstance(attrs, (list, tuple)):
+ cls_dict = dict((a, attrib()) for a in attrs)
+ else:
+ raise TypeError("attrs argument must be a dict or a list.")
+
+ pre_init = cls_dict.pop("__attrs_pre_init__", None)
+ post_init = cls_dict.pop("__attrs_post_init__", None)
+ user_init = cls_dict.pop("__init__", None)
+
+ body = {}
+ if pre_init is not None:
+ body["__attrs_pre_init__"] = pre_init
+ if post_init is not None:
+ body["__attrs_post_init__"] = post_init
+ if user_init is not None:
+ body["__init__"] = user_init
+
+ type_ = new_class(name, bases, {}, lambda ns: ns.update(body))
+
+ # For pickling to work, the __module__ variable needs to be set to the
+ # frame where the class is created. Bypass this step in environments where
+ # sys._getframe is not defined (Jython for example) or sys._getframe is not
+ # defined for arguments greater than 0 (IronPython).
+ try:
+ type_.__module__ = sys._getframe(1).f_globals.get(
+ "__name__", "__main__"
+ )
+ except (AttributeError, ValueError):
+ pass
+
+ # We do it here for proper warnings with meaningful stacklevel.
+ cmp = attributes_arguments.pop("cmp", None)
+ (
+ attributes_arguments["eq"],
+ attributes_arguments["order"],
+ ) = _determine_attrs_eq_order(
+ cmp,
+ attributes_arguments.get("eq"),
+ attributes_arguments.get("order"),
+ True,
+ )
+
+ return _attrs(these=cls_dict, **attributes_arguments)(type_)
+
+
+# These are required by within this module so we define them here and merely
+# import into .validators / .converters.
+
+
+@attrs(slots=True, hash=True)
+class _AndValidator(object):
+ """
+ Compose many validators to a single one.
+ """
+
+ _validators = attrib()
+
+ def __call__(self, inst, attr, value):
+ for v in self._validators:
+ v(inst, attr, value)
+
+
+def and_(*validators):
+ """
+ A validator that composes multiple validators into one.
+
+ When called on a value, it runs all wrapped validators.
+
+ :param callables validators: Arbitrary number of validators.
+
+ .. versionadded:: 17.1.0
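+
+ An illustrative sketch (``positive`` is a hypothetical validator):
+
+ >>> from attr.validators import instance_of
+ >>> def positive(inst, attribute, value):
+ ...     if value <= 0:
+ ...         raise ValueError("must be positive")
+ >>> x = attrib(validator=and_(instance_of(int), positive))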
+ """
+ vals = []
+ for validator in validators:
+ vals.extend(
+ validator._validators
+ if isinstance(validator, _AndValidator)
+ else [validator]
+ )
+
+ return _AndValidator(tuple(vals))
+
+
+def pipe(*converters):
+ """
+ A converter that composes multiple converters into one.
+
+ When called on a value, it runs all wrapped converters, returning the
+ *last* value.
+
+ Type annotations will be inferred from the wrapped converters'
+ annotations, if they have any.
+
+ :param callables converters: Arbitrary number of converters.
+
+ .. versionadded:: 20.1.0
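+
+ For instance (illustrative only):
+
+ >>> convert = pipe(int, bool)
+ >>> convert("0"), convert("1")
+ (False, True)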
+ """
+
+ def pipe_converter(val):
+ for converter in converters:
+ val = converter(val)
+
+ return val
+
+ if not PY2:
+ if not converters:
+ # If the converter list is empty, pipe_converter is the identity.
+ A = typing.TypeVar("A")
+ pipe_converter.__annotations__ = {"val": A, "return": A}
+ else:
+ # Get parameter type.
+ sig = None
+ try:
+ sig = inspect.signature(converters[0])
+ except (ValueError, TypeError): # inspect failed
+ pass
+ if sig:
+ params = list(sig.parameters.values())
+ if (
+ params
+ and params[0].annotation is not inspect.Parameter.empty
+ ):
+ pipe_converter.__annotations__["val"] = params[
+ 0
+ ].annotation
+ # Get return type.
+ sig = None
+ try:
+ sig = inspect.signature(converters[-1])
+ except (ValueError, TypeError): # inspect failed
+ pass
+ if sig and sig.return_annotation is not inspect.Signature().empty:
+ pipe_converter.__annotations__[
+ "return"
+ ] = sig.return_annotation
+
+ return pipe_converter
diff --git a/testing/web-platform/tests/tools/third_party/attrs/src/attr/_next_gen.py b/testing/web-platform/tests/tools/third_party/attrs/src/attr/_next_gen.py
new file mode 100644
index 0000000000..068253688c
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/src/attr/_next_gen.py
@@ -0,0 +1,216 @@
+# SPDX-License-Identifier: MIT
+
+"""
+These are Python 3.6+-only and keyword-only APIs that call `attr.s` and
+`attr.ib` with different default values.
+"""
+
+
+from functools import partial
+
+from . import setters
+from ._funcs import asdict as _asdict
+from ._funcs import astuple as _astuple
+from ._make import (
+ NOTHING,
+ _frozen_setattrs,
+ _ng_default_on_setattr,
+ attrib,
+ attrs,
+)
+from .exceptions import UnannotatedAttributeError
+
+
+def define(
+ maybe_cls=None,
+ *,
+ these=None,
+ repr=None,
+ hash=None,
+ init=None,
+ slots=True,
+ frozen=False,
+ weakref_slot=True,
+ str=False,
+ auto_attribs=None,
+ kw_only=False,
+ cache_hash=False,
+ auto_exc=True,
+ eq=None,
+ order=False,
+ auto_detect=True,
+ getstate_setstate=None,
+ on_setattr=None,
+ field_transformer=None,
+ match_args=True,
+):
+ r"""
+ Define an ``attrs`` class.
+
+ Differences from the classic `attr.s` that it uses underneath:
+
+ - Automatically detect whether or not *auto_attribs* should be `True`
+ (c.f. *auto_attribs* parameter).
+ - If *frozen* is `False`, run converters and validators when setting an
+ attribute by default.
+ - *slots=True* (see :term:`slotted classes` for potentially surprising
+ behaviors)
+ - *auto_exc=True*
+ - *auto_detect=True*
+ - *order=False*
+ - *match_args=True*
+ - Some options that were only relevant on Python 2 or were kept around for
+ backwards-compatibility have been removed.
+
+ Please note that these are all defaults and you can change them as you
+ wish.
+
+ :param Optional[bool] auto_attribs: If set to `True` or `False`, it behaves
+ exactly like `attr.s`. If left `None`, `attr.s` will try to guess:
+
+ 1. If any attributes are annotated and no unannotated `attrs.fields`\ s
+ are found, it assumes *auto_attribs=True*.
+ 2. Otherwise it assumes *auto_attribs=False* and tries to collect
+ `attrs.fields`\ s.
+
+ For now, please refer to `attr.s` for the rest of the parameters.
+
+ .. versionadded:: 20.1.0
+ .. versionchanged:: 21.3.0 Converters are also run ``on_setattr``.
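+
+ A minimal illustrative sketch (``Point`` is a hypothetical class):
+
+ >>> import attr
+ >>> @attr.define
+ ... class Point:
+ ...     x: int
+ ...     y: int = 0
+ >>> Point(1)
+ Point(x=1, y=0)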
+ """
+
+ def do_it(cls, auto_attribs):
+ return attrs(
+ maybe_cls=cls,
+ these=these,
+ repr=repr,
+ hash=hash,
+ init=init,
+ slots=slots,
+ frozen=frozen,
+ weakref_slot=weakref_slot,
+ str=str,
+ auto_attribs=auto_attribs,
+ kw_only=kw_only,
+ cache_hash=cache_hash,
+ auto_exc=auto_exc,
+ eq=eq,
+ order=order,
+ auto_detect=auto_detect,
+ collect_by_mro=True,
+ getstate_setstate=getstate_setstate,
+ on_setattr=on_setattr,
+ field_transformer=field_transformer,
+ match_args=match_args,
+ )
+
+ def wrap(cls):
+ """
+ Making this a wrapper ensures this code runs during class creation.
+
+ We also ensure that frozen-ness of classes is inherited.
+ """
+ nonlocal frozen, on_setattr
+
+ had_on_setattr = on_setattr not in (None, setters.NO_OP)
+
+ # By default, mutable classes convert & validate on setattr.
+ if frozen is False and on_setattr is None:
+ on_setattr = _ng_default_on_setattr
+
+ # However, if we subclass a frozen class, we inherit the immutability
+ # and disable on_setattr.
+ for base_cls in cls.__bases__:
+ if base_cls.__setattr__ is _frozen_setattrs:
+ if had_on_setattr:
+ raise ValueError(
+ "Frozen classes can't use on_setattr "
+ "(frozen-ness was inherited)."
+ )
+
+ on_setattr = setters.NO_OP
+ break
+
+ if auto_attribs is not None:
+ return do_it(cls, auto_attribs)
+
+ try:
+ return do_it(cls, True)
+ except UnannotatedAttributeError:
+ return do_it(cls, False)
+
+ # maybe_cls's type depends on the usage of the decorator. It's a class
+ # if it's used as `@attrs` but ``None`` if used as `@attrs()`.
+ if maybe_cls is None:
+ return wrap
+ else:
+ return wrap(maybe_cls)
+
+
+mutable = define
+frozen = partial(define, frozen=True, on_setattr=None)
+
+
+def field(
+ *,
+ default=NOTHING,
+ validator=None,
+ repr=True,
+ hash=None,
+ init=True,
+ metadata=None,
+ converter=None,
+ factory=None,
+ kw_only=False,
+ eq=None,
+ order=None,
+ on_setattr=None,
+):
+ """
+ Identical to `attr.ib`, except keyword-only and with some arguments
+ removed.
+
+ .. versionadded:: 20.1.0
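+
+ An illustrative sketch (``C`` is a hypothetical class):
+
+ >>> import attr
+ >>> @attr.define
+ ... class C:
+ ...     x: int = attr.field(default=1, converter=int)
+ >>> attr.asdict(C("2"))
+ {'x': 2}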
+ """
+ return attrib(
+ default=default,
+ validator=validator,
+ repr=repr,
+ hash=hash,
+ init=init,
+ metadata=metadata,
+ converter=converter,
+ factory=factory,
+ kw_only=kw_only,
+ eq=eq,
+ order=order,
+ on_setattr=on_setattr,
+ )
+
+
+def asdict(inst, *, recurse=True, filter=None, value_serializer=None):
+ """
+ Same as `attr.asdict`, except that collection types are always retained
+ and ``dict`` is always used as *dict_factory*.
+
+ .. versionadded:: 21.3.0
+ """
+ return _asdict(
+ inst=inst,
+ recurse=recurse,
+ filter=filter,
+ value_serializer=value_serializer,
+ retain_collection_types=True,
+ )
+
+
+def astuple(inst, *, recurse=True, filter=None):
+ """
+ Same as `attr.astuple`, except that collection types are always retained
+ and `tuple` is always used as the *tuple_factory*.
+
+ .. versionadded:: 21.3.0
+ """
+ return _astuple(
+ inst=inst, recurse=recurse, filter=filter, retain_collection_types=True
+ )
diff --git a/testing/web-platform/tests/tools/third_party/attrs/src/attr/_version_info.py b/testing/web-platform/tests/tools/third_party/attrs/src/attr/_version_info.py
new file mode 100644
index 0000000000..cdaeec37a1
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/src/attr/_version_info.py
@@ -0,0 +1,87 @@
+# SPDX-License-Identifier: MIT
+
+from __future__ import absolute_import, division, print_function
+
+from functools import total_ordering
+
+from ._funcs import astuple
+from ._make import attrib, attrs
+
+
+@total_ordering
+@attrs(eq=False, order=False, slots=True, frozen=True)
+class VersionInfo(object):
+ """
+ A version object that can be compared to tuples of length 1--4:
+
+ >>> attr.VersionInfo(19, 1, 0, "final") <= (19, 2)
+ True
+ >>> attr.VersionInfo(19, 1, 0, "final") < (19, 1, 1)
+ True
+ >>> vi = attr.VersionInfo(19, 2, 0, "final")
+ >>> vi < (19, 1, 1)
+ False
+ >>> vi < (19,)
+ False
+ >>> vi == (19, 2,)
+ True
+ >>> vi == (19, 2, 1)
+ False
+
+ .. versionadded:: 19.2
+ """
+
+ year = attrib(type=int)
+ minor = attrib(type=int)
+ micro = attrib(type=int)
+ releaselevel = attrib(type=str)
+
+ @classmethod
+ def _from_version_string(cls, s):
+ """
+ Parse *s* and return a _VersionInfo.
+ """
+ v = s.split(".")
+ if len(v) == 3:
+ v.append("final")
+
+ return cls(
+ year=int(v[0]), minor=int(v[1]), micro=int(v[2]), releaselevel=v[3]
+ )
+
+ def _ensure_tuple(self, other):
+ """
+ Ensure *other* is a tuple of a valid length.
+
+ Returns a possibly transformed *other* and ourselves as a tuple of
+ the same length as *other*.
+ """
+
+ if self.__class__ is other.__class__:
+ other = astuple(other)
+
+ if not isinstance(other, tuple):
+ raise NotImplementedError
+
+ if not (1 <= len(other) <= 4):
+ raise NotImplementedError
+
+ return astuple(self)[: len(other)], other
+
+ def __eq__(self, other):
+ try:
+ us, them = self._ensure_tuple(other)
+ except NotImplementedError:
+ return NotImplemented
+
+ return us == them
+
+ def __lt__(self, other):
+ try:
+ us, them = self._ensure_tuple(other)
+ except NotImplementedError:
+ return NotImplemented
+
+ # Since alphabetically "dev0" < "final" < "post1" < "post2", we don't
+ # have to do anything special with releaselevel for now.
+ return us < them
diff --git a/testing/web-platform/tests/tools/third_party/attrs/src/attr/_version_info.pyi b/testing/web-platform/tests/tools/third_party/attrs/src/attr/_version_info.pyi
new file mode 100644
index 0000000000..45ced08633
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/src/attr/_version_info.pyi
@@ -0,0 +1,9 @@
+class VersionInfo:
+ @property
+ def year(self) -> int: ...
+ @property
+ def minor(self) -> int: ...
+ @property
+ def micro(self) -> int: ...
+ @property
+ def releaselevel(self) -> str: ...
diff --git a/testing/web-platform/tests/tools/third_party/attrs/src/attr/converters.py b/testing/web-platform/tests/tools/third_party/attrs/src/attr/converters.py
new file mode 100644
index 0000000000..1fb6c05d7b
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/src/attr/converters.py
@@ -0,0 +1,155 @@
+# SPDX-License-Identifier: MIT
+
+"""
+Commonly useful converters.
+"""
+
+from __future__ import absolute_import, division, print_function
+
+from ._compat import PY2
+from ._make import NOTHING, Factory, pipe
+
+
+if not PY2:
+ import inspect
+ import typing
+
+
+__all__ = [
+ "default_if_none",
+ "optional",
+ "pipe",
+ "to_bool",
+]
+
+
+def optional(converter):
+ """
+ A converter that allows an attribute to be optional. An optional attribute
+ is one which can be set to ``None``.
+
+ Type annotations will be inferred from the wrapped converter's
+ annotations, if it has any.
+
+ :param callable converter: the converter that is used for non-``None``
+ values.
+
+ .. versionadded:: 17.1.0
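+
+ For instance (illustrative only):
+
+ >>> conv = optional(int)
+ >>> conv("42"), conv(None)
+ (42, None)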
+ """
+
+ def optional_converter(val):
+ if val is None:
+ return None
+ return converter(val)
+
+ if not PY2:
+ sig = None
+ try:
+ sig = inspect.signature(converter)
+ except (ValueError, TypeError): # inspect failed
+ pass
+ if sig:
+ params = list(sig.parameters.values())
+ if params and params[0].annotation is not inspect.Parameter.empty:
+ optional_converter.__annotations__["val"] = typing.Optional[
+ params[0].annotation
+ ]
+ if sig.return_annotation is not inspect.Signature.empty:
+ optional_converter.__annotations__["return"] = typing.Optional[
+ sig.return_annotation
+ ]
+
+ return optional_converter
+
+
+def default_if_none(default=NOTHING, factory=None):
+ """
+ A converter that allows replacing ``None`` values with *default* or the
+ result of *factory*.
+
+ :param default: Value to be used if ``None`` is passed. Passing an instance
+ of `attrs.Factory` is supported, however the ``takes_self`` option
+ is *not*.
+ :param callable factory: A callable that takes no parameters and whose
+ result is used if ``None`` is passed.
+
+ :raises TypeError: If **neither** *default* **nor** *factory* is passed.
+ :raises TypeError: If **both** *default* and *factory* are passed.
+ :raises ValueError: If an instance of `attrs.Factory` is passed with
+ ``takes_self=True``.
+
+ .. versionadded:: 18.2.0
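+
+ For instance (illustrative only):
+
+ >>> conv = default_if_none("")
+ >>> conv(None), conv("hello")
+ ('', 'hello')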
+ """
+ if default is NOTHING and factory is None:
+ raise TypeError("Must pass either `default` or `factory`.")
+
+ if default is not NOTHING and factory is not None:
+ raise TypeError(
+ "Must pass either `default` or `factory` but not both."
+ )
+
+ if factory is not None:
+ default = Factory(factory)
+
+ if isinstance(default, Factory):
+ if default.takes_self:
+ raise ValueError(
+ "`takes_self` is not supported by default_if_none."
+ )
+
+ def default_if_none_converter(val):
+ if val is not None:
+ return val
+
+ return default.factory()
+
+ else:
+
+ def default_if_none_converter(val):
+ if val is not None:
+ return val
+
+ return default
+
+ return default_if_none_converter
+
+
+def to_bool(val):
+ """
+ Convert "boolean" strings (e.g., from env. vars.) to real booleans.
+
+ Values mapping to :code:`True`:
+
+ - :code:`True`
+ - :code:`"true"` / :code:`"t"`
+ - :code:`"yes"` / :code:`"y"`
+ - :code:`"on"`
+ - :code:`"1"`
+ - :code:`1`
+
+ Values mapping to :code:`False`:
+
+ - :code:`False`
+ - :code:`"false"` / :code:`"f"`
+ - :code:`"no"` / :code:`"n"`
+ - :code:`"off"`
+ - :code:`"0"`
+ - :code:`0`
+
+ :raises ValueError: for any other value.
+
+ .. versionadded:: 21.3.0
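+
+ For instance (illustrative only):
+
+ >>> to_bool("YES"), to_bool(0), to_bool("off")
+ (True, False, False)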
+ """
+ if isinstance(val, str):
+ val = val.lower()
+ truthy = {True, "true", "t", "yes", "y", "on", "1", 1}
+ falsy = {False, "false", "f", "no", "n", "off", "0", 0}
+ try:
+ if val in truthy:
+ return True
+ if val in falsy:
+ return False
+ except TypeError:
+ # Raised when "val" is not hashable (e.g., lists)
+ pass
+ raise ValueError("Cannot convert value to bool: {}".format(val))
diff --git a/testing/web-platform/tests/tools/third_party/attrs/src/attr/converters.pyi b/testing/web-platform/tests/tools/third_party/attrs/src/attr/converters.pyi
new file mode 100644
index 0000000000..0f58088a37
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/src/attr/converters.pyi
@@ -0,0 +1,13 @@
+from typing import Callable, Optional, TypeVar, overload
+
+from . import _ConverterType
+
+_T = TypeVar("_T")
+
+def pipe(*validators: _ConverterType) -> _ConverterType: ...
+def optional(converter: _ConverterType) -> _ConverterType: ...
+@overload
+def default_if_none(default: _T) -> _ConverterType: ...
+@overload
+def default_if_none(*, factory: Callable[[], _T]) -> _ConverterType: ...
+def to_bool(val: str) -> bool: ...
diff --git a/testing/web-platform/tests/tools/third_party/attrs/src/attr/exceptions.py b/testing/web-platform/tests/tools/third_party/attrs/src/attr/exceptions.py
new file mode 100644
index 0000000000..b2f1edc32a
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/src/attr/exceptions.py
@@ -0,0 +1,94 @@
+# SPDX-License-Identifier: MIT
+
+from __future__ import absolute_import, division, print_function
+
+
+class FrozenError(AttributeError):
+ """
+ An attempt has been made to modify a frozen/immutable instance or
+ attribute.
+
+ It mirrors the behavior of ``namedtuples`` by using the same error message
+ and subclassing `AttributeError`.
+
+ .. versionadded:: 20.1.0
+ """
+
+ msg = "can't set attribute"
+ args = [msg]
+
+
+class FrozenInstanceError(FrozenError):
+ """
+    An attempt has been made to modify a frozen instance.
+
+ .. versionadded:: 16.1.0
+ """
+
+
+class FrozenAttributeError(FrozenError):
+ """
+    An attempt has been made to modify a frozen attribute.
+
+ .. versionadded:: 20.1.0
+ """
+
+
+class AttrsAttributeNotFoundError(ValueError):
+ """
+ An ``attrs`` function couldn't find an attribute that the user asked for.
+
+ .. versionadded:: 16.2.0
+ """
+
+
+class NotAnAttrsClassError(ValueError):
+ """
+ A non-``attrs`` class has been passed into an ``attrs`` function.
+
+ .. versionadded:: 16.2.0
+ """
+
+
+class DefaultAlreadySetError(RuntimeError):
+ """
+    A default has been set with ``attr.ib()`` and an attempt has been made
+    to reset it using the decorator.
+
+ .. versionadded:: 17.1.0
+ """
+
+
+class UnannotatedAttributeError(RuntimeError):
+ """
+ A class with ``auto_attribs=True`` has an ``attr.ib()`` without a type
+ annotation.
+
+ .. versionadded:: 17.3.0
+ """
+
+
+class PythonTooOldError(RuntimeError):
+ """
+    An ``attrs`` feature that requires a newer Python version has been
+    used.
+
+ .. versionadded:: 18.2.0
+ """
+
+
+class NotCallableError(TypeError):
+ """
+    An ``attr.ib()`` requiring a callable has been set with a value that is
+    not callable.
+
+ .. versionadded:: 19.2.0
+ """
+
+ def __init__(self, msg, value):
+ super(TypeError, self).__init__(msg, value)
+ self.msg = msg
+ self.value = value
+
+ def __str__(self):
+ return str(self.msg)
diff --git a/testing/web-platform/tests/tools/third_party/attrs/src/attr/exceptions.pyi b/testing/web-platform/tests/tools/third_party/attrs/src/attr/exceptions.pyi
new file mode 100644
index 0000000000..f2680118b4
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/src/attr/exceptions.pyi
@@ -0,0 +1,17 @@
+from typing import Any
+
+class FrozenError(AttributeError):
+ msg: str = ...
+
+class FrozenInstanceError(FrozenError): ...
+class FrozenAttributeError(FrozenError): ...
+class AttrsAttributeNotFoundError(ValueError): ...
+class NotAnAttrsClassError(ValueError): ...
+class DefaultAlreadySetError(RuntimeError): ...
+class UnannotatedAttributeError(RuntimeError): ...
+class PythonTooOldError(RuntimeError): ...
+
+class NotCallableError(TypeError):
+ msg: str = ...
+ value: Any = ...
+ def __init__(self, msg: str, value: Any) -> None: ...
diff --git a/testing/web-platform/tests/tools/third_party/attrs/src/attr/filters.py b/testing/web-platform/tests/tools/third_party/attrs/src/attr/filters.py
new file mode 100644
index 0000000000..a1978a8775
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/src/attr/filters.py
@@ -0,0 +1,54 @@
+# SPDX-License-Identifier: MIT
+
+"""
+Commonly useful filters for `attr.asdict`.
+"""
+
+from __future__ import absolute_import, division, print_function
+
+from ._compat import isclass
+from ._make import Attribute
+
+
+def _split_what(what):
+ """
+ Returns a tuple of `frozenset`s of classes and attributes.
+ """
+ return (
+ frozenset(cls for cls in what if isclass(cls)),
+ frozenset(cls for cls in what if isinstance(cls, Attribute)),
+ )
+
+
+def include(*what):
+ """
+ Include *what*.
+
+ :param what: What to include.
+ :type what: `list` of `type` or `attrs.Attribute`\\ s
+
+ :rtype: `callable`
+ """
+ cls, attrs = _split_what(what)
+
+ def include_(attribute, value):
+ return value.__class__ in cls or attribute in attrs
+
+ return include_
+
+
+def exclude(*what):
+ """
+ Exclude *what*.
+
+ :param what: What to exclude.
+ :type what: `list` of classes or `attrs.Attribute`\\ s.
+
+ :rtype: `callable`
+ """
+ cls, attrs = _split_what(what)
+
+ def exclude_(attribute, value):
+ return value.__class__ not in cls and attribute not in attrs
+
+ return exclude_
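+
+
+# A minimal usage sketch of the filters above together with `attr.asdict`
+# (the User class is hypothetical):
+#
+#     import attr
+#     from attr.filters import exclude
+#
+#     @attr.s
+#     class User(object):
+#         name = attr.ib()
+#         password = attr.ib()
+#
+#     u = User("jane", "hunter2")
+#     attr.asdict(u, filter=exclude(attr.fields(User).password))
+#     # -> {'name': 'jane'}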
diff --git a/testing/web-platform/tests/tools/third_party/attrs/src/attr/filters.pyi b/testing/web-platform/tests/tools/third_party/attrs/src/attr/filters.pyi
new file mode 100644
index 0000000000..993866865e
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/src/attr/filters.pyi
@@ -0,0 +1,6 @@
+from typing import Any, Union
+
+from . import Attribute, _FilterType
+
+def include(*what: Union[type, Attribute[Any]]) -> _FilterType[Any]: ...
+def exclude(*what: Union[type, Attribute[Any]]) -> _FilterType[Any]: ...
diff --git a/testing/web-platform/tests/tools/third_party/attrs/src/attr/py.typed b/testing/web-platform/tests/tools/third_party/attrs/src/attr/py.typed
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/src/attr/py.typed
diff --git a/testing/web-platform/tests/tools/third_party/attrs/src/attr/setters.py b/testing/web-platform/tests/tools/third_party/attrs/src/attr/setters.py
new file mode 100644
index 0000000000..b1cbb5d83e
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/src/attr/setters.py
@@ -0,0 +1,79 @@
+# SPDX-License-Identifier: MIT
+
+"""
+Commonly used hooks for on_setattr.
+"""
+
+from __future__ import absolute_import, division, print_function
+
+from . import _config
+from .exceptions import FrozenAttributeError
+
+
+def pipe(*setters):
+ """
+ Run all *setters* and return the return value of the last one.
+
+ .. versionadded:: 20.1.0
+ """
+
+ def wrapped_pipe(instance, attrib, new_value):
+ rv = new_value
+
+ for setter in setters:
+ rv = setter(instance, attrib, rv)
+
+ return rv
+
+ return wrapped_pipe
+
+
+def frozen(_, __, ___):
+ """
+    Prevent an attribute from being modified.
+
+ .. versionadded:: 20.1.0
+ """
+ raise FrozenAttributeError()
+
+
+def validate(instance, attrib, new_value):
+ """
+ Run *attrib*'s validator on *new_value* if it has one.
+
+ .. versionadded:: 20.1.0
+ """
+ if _config._run_validators is False:
+ return new_value
+
+ v = attrib.validator
+ if not v:
+ return new_value
+
+ v(instance, attrib, new_value)
+
+ return new_value
+
+
+def convert(instance, attrib, new_value):
+ """
+ Run *attrib*'s converter -- if it has one -- on *new_value* and return the
+ result.
+
+ .. versionadded:: 20.1.0
+ """
+ c = attrib.converter
+ if c:
+ return c(new_value)
+
+ return new_value
+
+
+NO_OP = object()
+"""
+Sentinel for disabling class-wide *on_setattr* hooks for certain attributes.
+
+Does not work in `pipe` or within lists.
+
+.. versionadded:: 20.1.0
+"""
diff --git a/testing/web-platform/tests/tools/third_party/attrs/src/attr/setters.pyi b/testing/web-platform/tests/tools/third_party/attrs/src/attr/setters.pyi
new file mode 100644
index 0000000000..3f5603c2b0
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/src/attr/setters.pyi
@@ -0,0 +1,19 @@
+from typing import Any, NewType, NoReturn, TypeVar, cast
+
+from . import Attribute, _OnSetAttrType
+
+_T = TypeVar("_T")
+
+def frozen(
+ instance: Any, attribute: Attribute[Any], new_value: Any
+) -> NoReturn: ...
+def pipe(*setters: _OnSetAttrType) -> _OnSetAttrType: ...
+def validate(instance: Any, attribute: Attribute[_T], new_value: _T) -> _T: ...
+
+# convert is allowed to return Any, because they can be chained using pipe.
+def convert(
+ instance: Any, attribute: Attribute[Any], new_value: Any
+) -> Any: ...
+
+_NoOpType = NewType("_NoOpType", object)
+NO_OP: _NoOpType
diff --git a/testing/web-platform/tests/tools/third_party/attrs/src/attr/validators.py b/testing/web-platform/tests/tools/third_party/attrs/src/attr/validators.py
new file mode 100644
index 0000000000..0b0c8342f2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/src/attr/validators.py
@@ -0,0 +1,561 @@
+# SPDX-License-Identifier: MIT
+
+"""
+Commonly useful validators.
+"""
+
+from __future__ import absolute_import, division, print_function
+
+import operator
+import re
+
+from contextlib import contextmanager
+
+from ._config import get_run_validators, set_run_validators
+from ._make import _AndValidator, and_, attrib, attrs
+from .exceptions import NotCallableError
+
+
+try:
+ Pattern = re.Pattern
+except AttributeError: # Python <3.7 lacks a Pattern type.
+ Pattern = type(re.compile(""))
+
+
+__all__ = [
+ "and_",
+ "deep_iterable",
+ "deep_mapping",
+ "disabled",
+ "ge",
+ "get_disabled",
+ "gt",
+ "in_",
+ "instance_of",
+ "is_callable",
+ "le",
+ "lt",
+ "matches_re",
+ "max_len",
+ "optional",
+ "provides",
+ "set_disabled",
+]
+
+
+def set_disabled(disabled):
+ """
+ Globally disable or enable running validators.
+
+ By default, they are run.
+
+ :param disabled: If ``True``, disable running all validators.
+ :type disabled: bool
+
+ .. warning::
+
+ This function is not thread-safe!
+
+ .. versionadded:: 21.3.0
+ """
+ set_run_validators(not disabled)
+
+
+def get_disabled():
+ """
+    Return a bool indicating whether validators are currently disabled.
+
+ :return: ``True`` if validators are currently disabled.
+ :rtype: bool
+
+ .. versionadded:: 21.3.0
+ """
+ return not get_run_validators()
+
+
+@contextmanager
+def disabled():
+ """
+ Context manager that disables running validators within its context.
+
+ .. warning::
+
+ This context manager is not thread-safe!
+
+ .. versionadded:: 21.3.0
+ """
+ set_run_validators(False)
+ try:
+ yield
+ finally:
+ set_run_validators(True)
+
+
+@attrs(repr=False, slots=True, hash=True)
+class _InstanceOfValidator(object):
+ type = attrib()
+
+ def __call__(self, inst, attr, value):
+ """
+ We use a callable class to be able to change the ``__repr__``.
+ """
+ if not isinstance(value, self.type):
+ raise TypeError(
+ "'{name}' must be {type!r} (got {value!r} that is a "
+ "{actual!r}).".format(
+ name=attr.name,
+ type=self.type,
+ actual=value.__class__,
+ value=value,
+ ),
+ attr,
+ self.type,
+ value,
+ )
+
+ def __repr__(self):
+ return "<instance_of validator for type {type!r}>".format(
+ type=self.type
+ )
+
+
+def instance_of(type):
+ """
+ A validator that raises a `TypeError` if the initializer is called
+    with a wrong type for this particular attribute (checks are performed
+    using `isinstance`, so it is also valid to pass a tuple of types).
+
+ :param type: The type to check for.
+ :type type: type or tuple of types
+
+ :raises TypeError: With a human readable error message, the attribute
+ (of type `attrs.Attribute`), the expected type, and the value it
+ got.
+ """
+ return _InstanceOfValidator(type)
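+
+
+# A minimal usage sketch of `instance_of` (the Point class is hypothetical):
+#
+#     import attr
+#     from attr.validators import instance_of
+#
+#     @attr.s
+#     class Point(object):
+#         x = attr.ib(validator=instance_of(int))
+#
+#     Point(1)     # passes
+#     Point("1")   # raises TypeError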
+
+
+@attrs(repr=False, frozen=True, slots=True)
+class _MatchesReValidator(object):
+ pattern = attrib()
+ match_func = attrib()
+
+ def __call__(self, inst, attr, value):
+ """
+ We use a callable class to be able to change the ``__repr__``.
+ """
+ if not self.match_func(value):
+ raise ValueError(
+ "'{name}' must match regex {pattern!r}"
+ " ({value!r} doesn't)".format(
+ name=attr.name, pattern=self.pattern.pattern, value=value
+ ),
+ attr,
+ self.pattern,
+ value,
+ )
+
+ def __repr__(self):
+ return "<matches_re validator for pattern {pattern!r}>".format(
+ pattern=self.pattern
+ )
+
+
+def matches_re(regex, flags=0, func=None):
+ r"""
+ A validator that raises `ValueError` if the initializer is called
+ with a string that doesn't match *regex*.
+
+ :param regex: a regex string or precompiled pattern to match against
+ :param int flags: flags that will be passed to the underlying re function
+ (default 0)
+ :param callable func: which underlying `re` function to call (options
+ are `re.fullmatch`, `re.search`, `re.match`, default
+ is ``None`` which means either `re.fullmatch` or an emulation of
+ it on Python 2). For performance reasons, they won't be used directly
+ but on a pre-`re.compile`\ ed pattern.
+
+ .. versionadded:: 19.2.0
+ .. versionchanged:: 21.3.0 *regex* can be a pre-compiled pattern.
+ """
+ fullmatch = getattr(re, "fullmatch", None)
+ valid_funcs = (fullmatch, None, re.search, re.match)
+ if func not in valid_funcs:
+ raise ValueError(
+ "'func' must be one of {}.".format(
+ ", ".join(
+ sorted(
+ e and e.__name__ or "None" for e in set(valid_funcs)
+ )
+ )
+ )
+ )
+
+ if isinstance(regex, Pattern):
+ if flags:
+ raise TypeError(
+ "'flags' can only be used with a string pattern; "
+ "pass flags to re.compile() instead"
+ )
+ pattern = regex
+ else:
+ pattern = re.compile(regex, flags)
+
+ if func is re.match:
+ match_func = pattern.match
+ elif func is re.search:
+ match_func = pattern.search
+ elif fullmatch:
+ match_func = pattern.fullmatch
+ else: # Python 2 fullmatch emulation (https://bugs.python.org/issue16203)
+ pattern = re.compile(
+ r"(?:{})\Z".format(pattern.pattern), pattern.flags
+ )
+ match_func = pattern.match
+
+ return _MatchesReValidator(pattern, match_func)
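+
+
+# A minimal usage sketch of `matches_re` (the User class is hypothetical and
+# the regex is only an illustration, not a serious e-mail check):
+#
+#     import attr
+#     from attr.validators import matches_re
+#
+#     @attr.s
+#     class User(object):
+#         email = attr.ib(validator=matches_re(r"[^@]+@[^@]+"))
+#
+#     User("jane@example.com")   # passes
+#     User("not-an-email")       # raises ValueError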
+
+
+@attrs(repr=False, slots=True, hash=True)
+class _ProvidesValidator(object):
+ interface = attrib()
+
+ def __call__(self, inst, attr, value):
+ """
+ We use a callable class to be able to change the ``__repr__``.
+ """
+ if not self.interface.providedBy(value):
+ raise TypeError(
+ "'{name}' must provide {interface!r} which {value!r} "
+ "doesn't.".format(
+ name=attr.name, interface=self.interface, value=value
+ ),
+ attr,
+ self.interface,
+ value,
+ )
+
+ def __repr__(self):
+ return "<provides validator for interface {interface!r}>".format(
+ interface=self.interface
+ )
+
+
+def provides(interface):
+ """
+ A validator that raises a `TypeError` if the initializer is called
+    with an object that does not provide the requested *interface*. Checks
+    are performed using ``interface.providedBy(value)`` (see `zope.interface
+    <https://zopeinterface.readthedocs.io/en/latest/>`_).
+
+ :param interface: The interface to check for.
+ :type interface: ``zope.interface.Interface``
+
+ :raises TypeError: With a human readable error message, the attribute
+ (of type `attrs.Attribute`), the expected interface, and the
+ value it got.
+ """
+ return _ProvidesValidator(interface)
+
+
+@attrs(repr=False, slots=True, hash=True)
+class _OptionalValidator(object):
+ validator = attrib()
+
+ def __call__(self, inst, attr, value):
+ if value is None:
+ return
+
+ self.validator(inst, attr, value)
+
+ def __repr__(self):
+ return "<optional validator for {what} or None>".format(
+ what=repr(self.validator)
+ )
+
+
+def optional(validator):
+ """
+ A validator that makes an attribute optional. An optional attribute is one
+ which can be set to ``None`` in addition to satisfying the requirements of
+ the sub-validator.
+
+ :param validator: A validator (or a list of validators) that is used for
+ non-``None`` values.
+ :type validator: callable or `list` of callables.
+
+ .. versionadded:: 15.1.0
+ .. versionchanged:: 17.1.0 *validator* can be a list of validators.
+ """
+ if isinstance(validator, list):
+ return _OptionalValidator(_AndValidator(validator))
+ return _OptionalValidator(validator)
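+
+
+# A minimal usage sketch of `optional` combined with `instance_of` (the
+# class C is hypothetical):
+#
+#     import attr
+#     from attr.validators import instance_of, optional
+#
+#     @attr.s
+#     class C(object):
+#         x = attr.ib(validator=optional(instance_of(int)), default=None)
+#
+#     C()      # passes, x is None
+#     C(3)     # passes
+#     C("3")   # raises TypeError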
+
+
+@attrs(repr=False, slots=True, hash=True)
+class _InValidator(object):
+ options = attrib()
+
+ def __call__(self, inst, attr, value):
+ try:
+ in_options = value in self.options
+ except TypeError: # e.g. `1 in "abc"`
+ in_options = False
+
+ if not in_options:
+ raise ValueError(
+ "'{name}' must be in {options!r} (got {value!r})".format(
+ name=attr.name, options=self.options, value=value
+ )
+ )
+
+ def __repr__(self):
+ return "<in_ validator with options {options!r}>".format(
+ options=self.options
+ )
+
+
+def in_(options):
+ """
+ A validator that raises a `ValueError` if the initializer is called
+ with a value that does not belong in the options provided. The check is
+ performed using ``value in options``.
+
+ :param options: Allowed options.
+ :type options: list, tuple, `enum.Enum`, ...
+
+ :raises ValueError: With a human readable error message, the attribute (of
+ type `attrs.Attribute`), the expected options, and the value it
+ got.
+
+ .. versionadded:: 17.1.0
+ """
+ return _InValidator(options)
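+
+
+# A minimal usage sketch of `in_` with an Enum (Color and Pixel are
+# hypothetical names):
+#
+#     import enum
+#
+#     import attr
+#     from attr.validators import in_
+#
+#     class Color(enum.Enum):
+#         RED = 1
+#         BLUE = 2
+#
+#     @attr.s
+#     class Pixel(object):
+#         color = attr.ib(validator=in_(Color))
+#
+#     Pixel(Color.RED)   # passes
+#     Pixel("red")       # raises ValueError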
+
+
+@attrs(repr=False, slots=False, hash=True)
+class _IsCallableValidator(object):
+ def __call__(self, inst, attr, value):
+ """
+ We use a callable class to be able to change the ``__repr__``.
+ """
+ if not callable(value):
+ message = (
+ "'{name}' must be callable "
+ "(got {value!r} that is a {actual!r})."
+ )
+ raise NotCallableError(
+ msg=message.format(
+ name=attr.name, value=value, actual=value.__class__
+ ),
+ value=value,
+ )
+
+ def __repr__(self):
+ return "<is_callable validator>"
+
+
+def is_callable():
+ """
+    A validator that raises an `attr.exceptions.NotCallableError` if the
+ initializer is called with a value for this particular attribute
+ that is not callable.
+
+ .. versionadded:: 19.1.0
+
+ :raises `attr.exceptions.NotCallableError`: With a human readable error
+ message containing the attribute (`attrs.Attribute`) name,
+ and the value it got.
+ """
+ return _IsCallableValidator()
+
+
+@attrs(repr=False, slots=True, hash=True)
+class _DeepIterable(object):
+ member_validator = attrib(validator=is_callable())
+ iterable_validator = attrib(
+ default=None, validator=optional(is_callable())
+ )
+
+ def __call__(self, inst, attr, value):
+ """
+ We use a callable class to be able to change the ``__repr__``.
+ """
+ if self.iterable_validator is not None:
+ self.iterable_validator(inst, attr, value)
+
+ for member in value:
+ self.member_validator(inst, attr, member)
+
+ def __repr__(self):
+ iterable_identifier = (
+ ""
+ if self.iterable_validator is None
+ else " {iterable!r}".format(iterable=self.iterable_validator)
+ )
+ return (
+ "<deep_iterable validator for{iterable_identifier}"
+ " iterables of {member!r}>"
+ ).format(
+ iterable_identifier=iterable_identifier,
+ member=self.member_validator,
+ )
+
+
+def deep_iterable(member_validator, iterable_validator=None):
+ """
+ A validator that performs deep validation of an iterable.
+
+ :param member_validator: Validator to apply to iterable members
+ :param iterable_validator: Validator to apply to iterable itself
+ (optional)
+
+ .. versionadded:: 19.1.0
+
+ :raises TypeError: if any sub-validators fail
+ """
+ return _DeepIterable(member_validator, iterable_validator)
+
+
+@attrs(repr=False, slots=True, hash=True)
+class _DeepMapping(object):
+ key_validator = attrib(validator=is_callable())
+ value_validator = attrib(validator=is_callable())
+ mapping_validator = attrib(default=None, validator=optional(is_callable()))
+
+ def __call__(self, inst, attr, value):
+ """
+ We use a callable class to be able to change the ``__repr__``.
+ """
+ if self.mapping_validator is not None:
+ self.mapping_validator(inst, attr, value)
+
+ for key in value:
+ self.key_validator(inst, attr, key)
+ self.value_validator(inst, attr, value[key])
+
+ def __repr__(self):
+ return (
+ "<deep_mapping validator for objects mapping {key!r} to {value!r}>"
+ ).format(key=self.key_validator, value=self.value_validator)
+
+
+def deep_mapping(key_validator, value_validator, mapping_validator=None):
+ """
+ A validator that performs deep validation of a dictionary.
+
+ :param key_validator: Validator to apply to dictionary keys
+ :param value_validator: Validator to apply to dictionary values
+ :param mapping_validator: Validator to apply to top-level mapping
+ attribute (optional)
+
+ .. versionadded:: 19.1.0
+
+ :raises TypeError: if any sub-validators fail
+ """
+ return _DeepMapping(key_validator, value_validator, mapping_validator)
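+
+
+# A minimal usage sketch of the deep validators above (the class C is
+# hypothetical):
+#
+#     import attr
+#     from attr.validators import deep_iterable, instance_of
+#
+#     @attr.s
+#     class C(object):
+#         numbers = attr.ib(
+#             validator=deep_iterable(
+#                 member_validator=instance_of(int),
+#                 iterable_validator=instance_of(list),
+#             )
+#         )
+#
+#     C([1, 2, 3])     # passes
+#     C([1, "2", 3])   # raises TypeError (bad member)
+#     C((1, 2, 3))     # raises TypeError (not a list)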
+
+
+@attrs(repr=False, frozen=True, slots=True)
+class _NumberValidator(object):
+ bound = attrib()
+ compare_op = attrib()
+ compare_func = attrib()
+
+ def __call__(self, inst, attr, value):
+ """
+ We use a callable class to be able to change the ``__repr__``.
+ """
+ if not self.compare_func(value, self.bound):
+ raise ValueError(
+ "'{name}' must be {op} {bound}: {value}".format(
+ name=attr.name,
+ op=self.compare_op,
+ bound=self.bound,
+ value=value,
+ )
+ )
+
+ def __repr__(self):
+ return "<Validator for x {op} {bound}>".format(
+ op=self.compare_op, bound=self.bound
+ )
+
+
+def lt(val):
+ """
+ A validator that raises `ValueError` if the initializer is called
+    with a number larger than or equal to *val*.
+
+ :param val: Exclusive upper bound for values
+
+ .. versionadded:: 21.3.0
+ """
+ return _NumberValidator(val, "<", operator.lt)
+
+
+def le(val):
+ """
+ A validator that raises `ValueError` if the initializer is called
+ with a number greater than *val*.
+
+ :param val: Inclusive upper bound for values
+
+ .. versionadded:: 21.3.0
+ """
+ return _NumberValidator(val, "<=", operator.le)
+
+
+def ge(val):
+ """
+ A validator that raises `ValueError` if the initializer is called
+ with a number smaller than *val*.
+
+ :param val: Inclusive lower bound for values
+
+ .. versionadded:: 21.3.0
+ """
+ return _NumberValidator(val, ">=", operator.ge)
+
+
+def gt(val):
+ """
+ A validator that raises `ValueError` if the initializer is called
+    with a number smaller than or equal to *val*.
+
+ :param val: Exclusive lower bound for values
+
+ .. versionadded:: 21.3.0
+ """
+ return _NumberValidator(val, ">", operator.gt)
+
+
+@attrs(repr=False, frozen=True, slots=True)
+class _MaxLengthValidator(object):
+ max_length = attrib()
+
+ def __call__(self, inst, attr, value):
+ """
+ We use a callable class to be able to change the ``__repr__``.
+ """
+ if len(value) > self.max_length:
+ raise ValueError(
+ "Length of '{name}' must be <= {max}: {len}".format(
+ name=attr.name, max=self.max_length, len=len(value)
+ )
+ )
+
+ def __repr__(self):
+ return "<max_len validator for {max}>".format(max=self.max_length)
+
+
+def max_len(length):
+ """
+ A validator that raises `ValueError` if the initializer is called
+ with a string or iterable that is longer than *length*.
+
+ :param int length: Maximum length of the string or iterable
+
+ .. versionadded:: 21.3.0
+ """
+ return _MaxLengthValidator(length)
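+
+
+# A minimal usage sketch of the bound and length validators above (the
+# Player class is hypothetical):
+#
+#     import attr
+#     from attr.validators import ge, lt, max_len
+#
+#     @attr.s
+#     class Player(object):
+#         level = attr.ib(validator=[ge(1), lt(100)])
+#         name = attr.ib(validator=max_len(16))
+#
+#     Player(10, "alice")    # passes
+#     Player(0, "alice")     # raises ValueError: 'level' must be >= 1
+#     Player(10, "a" * 20)   # raises ValueError: name is too long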
diff --git a/testing/web-platform/tests/tools/third_party/attrs/src/attr/validators.pyi b/testing/web-platform/tests/tools/third_party/attrs/src/attr/validators.pyi
new file mode 100644
index 0000000000..5e00b85433
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/src/attr/validators.pyi
@@ -0,0 +1,78 @@
+from typing import (
+ Any,
+ AnyStr,
+ Callable,
+ Container,
+ ContextManager,
+ Iterable,
+ List,
+ Mapping,
+ Match,
+ Optional,
+ Pattern,
+ Tuple,
+ Type,
+ TypeVar,
+ Union,
+ overload,
+)
+
+from . import _ValidatorType
+
+_T = TypeVar("_T")
+_T1 = TypeVar("_T1")
+_T2 = TypeVar("_T2")
+_T3 = TypeVar("_T3")
+_I = TypeVar("_I", bound=Iterable)
+_K = TypeVar("_K")
+_V = TypeVar("_V")
+_M = TypeVar("_M", bound=Mapping)
+
+def set_disabled(run: bool) -> None: ...
+def get_disabled() -> bool: ...
+def disabled() -> ContextManager[None]: ...
+
+# To be more precise on instance_of use some overloads.
+# If there are more than 3 items in the tuple then we fall back to Any
+@overload
+def instance_of(type: Type[_T]) -> _ValidatorType[_T]: ...
+@overload
+def instance_of(type: Tuple[Type[_T]]) -> _ValidatorType[_T]: ...
+@overload
+def instance_of(
+ type: Tuple[Type[_T1], Type[_T2]]
+) -> _ValidatorType[Union[_T1, _T2]]: ...
+@overload
+def instance_of(
+ type: Tuple[Type[_T1], Type[_T2], Type[_T3]]
+) -> _ValidatorType[Union[_T1, _T2, _T3]]: ...
+@overload
+def instance_of(type: Tuple[type, ...]) -> _ValidatorType[Any]: ...
+def provides(interface: Any) -> _ValidatorType[Any]: ...
+def optional(
+ validator: Union[_ValidatorType[_T], List[_ValidatorType[_T]]]
+) -> _ValidatorType[Optional[_T]]: ...
+def in_(options: Container[_T]) -> _ValidatorType[_T]: ...
+def and_(*validators: _ValidatorType[_T]) -> _ValidatorType[_T]: ...
+def matches_re(
+ regex: Union[Pattern[AnyStr], AnyStr],
+ flags: int = ...,
+ func: Optional[
+ Callable[[AnyStr, AnyStr, int], Optional[Match[AnyStr]]]
+ ] = ...,
+) -> _ValidatorType[AnyStr]: ...
+def deep_iterable(
+ member_validator: _ValidatorType[_T],
+ iterable_validator: Optional[_ValidatorType[_I]] = ...,
+) -> _ValidatorType[_I]: ...
+def deep_mapping(
+ key_validator: _ValidatorType[_K],
+ value_validator: _ValidatorType[_V],
+ mapping_validator: Optional[_ValidatorType[_M]] = ...,
+) -> _ValidatorType[_M]: ...
+def is_callable() -> _ValidatorType[_T]: ...
+def lt(val: _T) -> _ValidatorType[_T]: ...
+def le(val: _T) -> _ValidatorType[_T]: ...
+def ge(val: _T) -> _ValidatorType[_T]: ...
+def gt(val: _T) -> _ValidatorType[_T]: ...
+def max_len(length: int) -> _ValidatorType[_T]: ...
diff --git a/testing/web-platform/tests/tools/third_party/attrs/src/attrs/__init__.py b/testing/web-platform/tests/tools/third_party/attrs/src/attrs/__init__.py
new file mode 100644
index 0000000000..a704b8b56b
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/src/attrs/__init__.py
@@ -0,0 +1,70 @@
+# SPDX-License-Identifier: MIT
+
+from attr import (
+ NOTHING,
+ Attribute,
+ Factory,
+ __author__,
+ __copyright__,
+ __description__,
+ __doc__,
+ __email__,
+ __license__,
+ __title__,
+ __url__,
+ __version__,
+ __version_info__,
+ assoc,
+ cmp_using,
+ define,
+ evolve,
+ field,
+ fields,
+ fields_dict,
+ frozen,
+ has,
+ make_class,
+ mutable,
+ resolve_types,
+ validate,
+)
+from attr._next_gen import asdict, astuple
+
+from . import converters, exceptions, filters, setters, validators
+
+
+__all__ = [
+ "__author__",
+ "__copyright__",
+ "__description__",
+ "__doc__",
+ "__email__",
+ "__license__",
+ "__title__",
+ "__url__",
+ "__version__",
+ "__version_info__",
+ "asdict",
+ "assoc",
+ "astuple",
+ "Attribute",
+ "cmp_using",
+ "converters",
+ "define",
+ "evolve",
+ "exceptions",
+ "Factory",
+ "field",
+ "fields_dict",
+ "fields",
+ "filters",
+ "frozen",
+ "has",
+ "make_class",
+ "mutable",
+ "NOTHING",
+ "resolve_types",
+ "setters",
+ "validate",
+ "validators",
+]
diff --git a/testing/web-platform/tests/tools/third_party/attrs/src/attrs/__init__.pyi b/testing/web-platform/tests/tools/third_party/attrs/src/attrs/__init__.pyi
new file mode 100644
index 0000000000..7426fa5ddb
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/src/attrs/__init__.pyi
@@ -0,0 +1,63 @@
+from typing import (
+ Any,
+ Callable,
+ Dict,
+ Mapping,
+ Optional,
+ Sequence,
+ Tuple,
+ Type,
+)
+
+# Because we need to type our own stuff, we have to make everything from
+# attr explicitly public too.
+from attr import __author__ as __author__
+from attr import __copyright__ as __copyright__
+from attr import __description__ as __description__
+from attr import __email__ as __email__
+from attr import __license__ as __license__
+from attr import __title__ as __title__
+from attr import __url__ as __url__
+from attr import __version__ as __version__
+from attr import __version_info__ as __version_info__
+from attr import _FilterType
+from attr import assoc as assoc
+from attr import Attribute as Attribute
+from attr import define as define
+from attr import evolve as evolve
+from attr import Factory as Factory
+from attr import exceptions as exceptions
+from attr import field as field
+from attr import fields as fields
+from attr import fields_dict as fields_dict
+from attr import frozen as frozen
+from attr import has as has
+from attr import make_class as make_class
+from attr import mutable as mutable
+from attr import NOTHING as NOTHING
+from attr import resolve_types as resolve_types
+from attr import setters as setters
+from attr import validate as validate
+from attr import validators as validators
+
+# TODO: see definition of attr.asdict/astuple
+def asdict(
+ inst: Any,
+ recurse: bool = ...,
+ filter: Optional[_FilterType[Any]] = ...,
+ dict_factory: Type[Mapping[Any, Any]] = ...,
+ retain_collection_types: bool = ...,
+ value_serializer: Optional[
+ Callable[[type, Attribute[Any], Any], Any]
+ ] = ...,
+ tuple_keys: bool = ...,
+) -> Dict[str, Any]: ...
+
+# TODO: add support for returning NamedTuple from the mypy plugin
+def astuple(
+ inst: Any,
+ recurse: bool = ...,
+ filter: Optional[_FilterType[Any]] = ...,
+ tuple_factory: Type[Sequence[Any]] = ...,
+ retain_collection_types: bool = ...,
+) -> Tuple[Any, ...]: ...
diff --git a/testing/web-platform/tests/tools/third_party/attrs/src/attrs/converters.py b/testing/web-platform/tests/tools/third_party/attrs/src/attrs/converters.py
new file mode 100644
index 0000000000..edfa8d3c16
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/src/attrs/converters.py
@@ -0,0 +1,3 @@
+# SPDX-License-Identifier: MIT
+
+from attr.converters import * # noqa
diff --git a/testing/web-platform/tests/tools/third_party/attrs/src/attrs/exceptions.py b/testing/web-platform/tests/tools/third_party/attrs/src/attrs/exceptions.py
new file mode 100644
index 0000000000..bd9efed202
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/src/attrs/exceptions.py
@@ -0,0 +1,3 @@
+# SPDX-License-Identifier: MIT
+
+from attr.exceptions import * # noqa
diff --git a/testing/web-platform/tests/tools/third_party/attrs/src/attrs/filters.py b/testing/web-platform/tests/tools/third_party/attrs/src/attrs/filters.py
new file mode 100644
index 0000000000..52959005b0
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/src/attrs/filters.py
@@ -0,0 +1,3 @@
+# SPDX-License-Identifier: MIT
+
+from attr.filters import * # noqa
diff --git a/testing/web-platform/tests/tools/third_party/attrs/src/attrs/py.typed b/testing/web-platform/tests/tools/third_party/attrs/src/attrs/py.typed
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/src/attrs/py.typed
diff --git a/testing/web-platform/tests/tools/third_party/attrs/src/attrs/setters.py b/testing/web-platform/tests/tools/third_party/attrs/src/attrs/setters.py
new file mode 100644
index 0000000000..9b50770804
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/src/attrs/setters.py
@@ -0,0 +1,3 @@
+# SPDX-License-Identifier: MIT
+
+from attr.setters import * # noqa
diff --git a/testing/web-platform/tests/tools/third_party/attrs/src/attrs/validators.py b/testing/web-platform/tests/tools/third_party/attrs/src/attrs/validators.py
new file mode 100644
index 0000000000..ab2c9b3024
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/src/attrs/validators.py
@@ -0,0 +1,3 @@
+# SPDX-License-Identifier: MIT
+
+from attr.validators import * # noqa
diff --git a/testing/web-platform/tests/tools/third_party/attrs/tests/__init__.py b/testing/web-platform/tests/tools/third_party/attrs/tests/__init__.py
new file mode 100644
index 0000000000..548d2d447d
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/tests/__init__.py
@@ -0,0 +1 @@
+# SPDX-License-Identifier: MIT
diff --git a/testing/web-platform/tests/tools/third_party/attrs/tests/attr_import_star.py b/testing/web-platform/tests/tools/third_party/attrs/tests/attr_import_star.py
new file mode 100644
index 0000000000..eaec321bac
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/tests/attr_import_star.py
@@ -0,0 +1,10 @@
+# SPDX-License-Identifier: MIT
+
+from __future__ import absolute_import
+
+from attr import * # noqa: F401,F403
+
+
+# This is imported by test_import::test_from_attr_import_star; this must
+# be done indirectly because importing * is only allowed on module level,
+# so can't be done inside a test.
diff --git a/testing/web-platform/tests/tools/third_party/attrs/tests/dataclass_transform_example.py b/testing/web-platform/tests/tools/third_party/attrs/tests/dataclass_transform_example.py
new file mode 100644
index 0000000000..49e09061a8
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/tests/dataclass_transform_example.py
@@ -0,0 +1,45 @@
+# SPDX-License-Identifier: MIT
+
+import attr
+
+
+@attr.define()
+class Define:
+ a: str
+ b: int
+
+
+reveal_type(Define.__init__) # noqa
+
+
+@attr.define()
+class DefineConverter:
+ # mypy plugin adapts the "int" method signature, pyright does not
+ with_converter: int = attr.field(converter=int)
+
+
+reveal_type(DefineConverter.__init__) # noqa
+
+
+# mypy plugin supports attr.frozen, pyright does not
+@attr.frozen()
+class Frozen:
+ a: str
+
+
+d = Frozen("a")
+d.a = "new"
+
+reveal_type(d.a) # noqa
+
+
+# but pyright supports attr.define(frozen)
+@attr.define(frozen=True)
+class FrozenDefine:
+ a: str
+
+
+d2 = FrozenDefine("a")
+d2.a = "new"
+
+reveal_type(d2.a) # noqa
diff --git a/testing/web-platform/tests/tools/third_party/attrs/tests/strategies.py b/testing/web-platform/tests/tools/third_party/attrs/tests/strategies.py
new file mode 100644
index 0000000000..99f9f48536
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/tests/strategies.py
@@ -0,0 +1,198 @@
+# SPDX-License-Identifier: MIT
+
+"""
+Testing strategies for Hypothesis-based tests.
+"""
+
+import keyword
+import string
+
+from collections import OrderedDict
+
+from hypothesis import strategies as st
+
+import attr
+
+from .utils import make_class
+
+
+optional_bool = st.one_of(st.none(), st.booleans())
+
+
+def gen_attr_names():
+ """
+ Generate names for attributes, 'a'...'z', then 'aa'...'zz'.
+
+ ~702 different attribute names should be enough in practice.
+
+ Some short strings (such as 'as') are keywords, so we skip them.
+ """
+ lc = string.ascii_lowercase
+ for c in lc:
+ yield c
+ for outer in lc:
+ for inner in lc:
+ res = outer + inner
+ if keyword.iskeyword(res):
+ continue
+ yield outer + inner
+
+
+def maybe_underscore_prefix(source):
+ """
+ A generator to sometimes prepend an underscore.
+ """
+ to_underscore = False
+ for val in source:
+ yield val if not to_underscore else "_" + val
+ to_underscore = not to_underscore
+
+
+@st.composite
+def _create_hyp_nested_strategy(draw, simple_class_strategy):
+ """
+ Create a recursive attrs class.
+
+ Given a strategy for building (simpler) classes, create and return
+ a strategy for building classes that have as an attribute: either just
+ the simpler class, a list of simpler classes, a tuple of simpler classes,
+ an ordered dict or a dict mapping the string "cls" to a simpler class.
+ """
+ cls = draw(simple_class_strategy)
+ factories = [
+ cls,
+ lambda: [cls()],
+ lambda: (cls(),),
+ lambda: {"cls": cls()},
+ lambda: OrderedDict([("cls", cls())]),
+ ]
+ factory = draw(st.sampled_from(factories))
+ attrs = draw(list_of_attrs) + [attr.ib(default=attr.Factory(factory))]
+ return make_class("HypClass", dict(zip(gen_attr_names(), attrs)))
+
+
+bare_attrs = st.builds(attr.ib, default=st.none())
+int_attrs = st.integers().map(lambda i: attr.ib(default=i))
+str_attrs = st.text().map(lambda s: attr.ib(default=s))
+float_attrs = st.floats().map(lambda f: attr.ib(default=f))
+dict_attrs = st.dictionaries(keys=st.text(), values=st.integers()).map(
+ lambda d: attr.ib(default=d)
+)
+
+simple_attrs_without_metadata = (
+ bare_attrs | int_attrs | str_attrs | float_attrs | dict_attrs
+)
+
+
+@st.composite
+def simple_attrs_with_metadata(draw):
+ """
+ Create a simple attribute with arbitrary metadata.
+ """
+ c_attr = draw(simple_attrs)
+ keys = st.booleans() | st.binary() | st.integers() | st.text()
+ vals = st.booleans() | st.binary() | st.integers() | st.text()
+ metadata = draw(
+ st.dictionaries(keys=keys, values=vals, min_size=1, max_size=3)
+ )
+
+ return attr.ib(
+ default=c_attr._default,
+ validator=c_attr._validator,
+ repr=c_attr.repr,
+ eq=c_attr.eq,
+ order=c_attr.order,
+ hash=c_attr.hash,
+ init=c_attr.init,
+ metadata=metadata,
+ type=None,
+ converter=c_attr.converter,
+ )
+
+
+simple_attrs = simple_attrs_without_metadata | simple_attrs_with_metadata()
+
+# Python functions support up to 255 arguments.
+list_of_attrs = st.lists(simple_attrs, max_size=3)
+
+
+@st.composite
+def simple_classes(
+ draw, slots=None, frozen=None, weakref_slot=None, private_attrs=None
+):
+ """
+ A strategy that generates classes with default non-attr attributes.
+
+ For example, this strategy might generate a class such as:
+
+ @attr.s(slots=True, frozen=True, weakref_slot=True)
+ class HypClass:
+ a = attr.ib(default=1)
+ _b = attr.ib(default=None)
+ c = attr.ib(default='text')
+ _d = attr.ib(default=1.0)
+        e = attr.ib(default={'t': 1})
+
+ By default, all combinations of slots, frozen, and weakref_slot classes
+ will be generated. If `slots=True` is passed in, only slotted classes will
+ be generated, and if `slots=False` is passed in, no slotted classes will be
+ generated. The same applies to `frozen` and `weakref_slot`.
+
+ By default, some attributes will be private (i.e. prefixed with an
+ underscore). If `private_attrs=True` is passed in, all attributes will be
+ private, and if `private_attrs=False`, no attributes will be private.
+ """
+ attrs = draw(list_of_attrs)
+ frozen_flag = draw(st.booleans())
+ slots_flag = draw(st.booleans())
+ weakref_flag = draw(st.booleans())
+
+ if private_attrs is None:
+ attr_names = maybe_underscore_prefix(gen_attr_names())
+ elif private_attrs is True:
+ attr_names = ("_" + n for n in gen_attr_names())
+ elif private_attrs is False:
+ attr_names = gen_attr_names()
+
+ cls_dict = dict(zip(attr_names, attrs))
+ pre_init_flag = draw(st.booleans())
+ post_init_flag = draw(st.booleans())
+ init_flag = draw(st.booleans())
+
+ if pre_init_flag:
+
+ def pre_init(self):
+ pass
+
+ cls_dict["__attrs_pre_init__"] = pre_init
+
+ if post_init_flag:
+
+ def post_init(self):
+ pass
+
+ cls_dict["__attrs_post_init__"] = post_init
+
+ if not init_flag:
+
+ def init(self, *args, **kwargs):
+ self.__attrs_init__(*args, **kwargs)
+
+ cls_dict["__init__"] = init
+
+ return make_class(
+ "HypClass",
+ cls_dict,
+ slots=slots_flag if slots is None else slots,
+ frozen=frozen_flag if frozen is None else frozen,
+ weakref_slot=weakref_flag if weakref_slot is None else weakref_slot,
+ init=init_flag,
+ )
+
+
+# st.recursive works by taking a base strategy (in this case, simple_classes)
+# and a special function. This function receives a strategy, and returns
+# another strategy (building on top of the base strategy).
+nested_classes = st.recursive(
+ simple_classes(), _create_hyp_nested_strategy, max_leaves=3
+)
diff --git a/testing/web-platform/tests/tools/third_party/attrs/tests/test_3rd_party.py b/testing/web-platform/tests/tools/third_party/attrs/tests/test_3rd_party.py
new file mode 100644
index 0000000000..8866d7f6ef
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/tests/test_3rd_party.py
@@ -0,0 +1,31 @@
+# SPDX-License-Identifier: MIT
+
+"""
+Tests for compatibility against other Python modules.
+"""
+
+import pytest
+
+from hypothesis import given
+
+from .strategies import simple_classes
+
+
+cloudpickle = pytest.importorskip("cloudpickle")
+
+
+class TestCloudpickleCompat(object):
+ """
+ Tests for compatibility with ``cloudpickle``.
+ """
+
+ @given(simple_classes())
+ def test_repr(self, cls):
+ """
+ attrs instances can be pickled and un-pickled with cloudpickle.
+ """
+ inst = cls()
+ # Exact values aren't a concern so long as neither direction
+ # raises an exception.
+ pkl = cloudpickle.dumps(inst)
+ cloudpickle.loads(pkl)
diff --git a/testing/web-platform/tests/tools/third_party/attrs/tests/test_annotations.py b/testing/web-platform/tests/tools/third_party/attrs/tests/test_annotations.py
new file mode 100644
index 0000000000..a201ebf7fa
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/tests/test_annotations.py
@@ -0,0 +1,671 @@
+# SPDX-License-Identifier: MIT
+
+"""
+Tests for PEP-526 type annotations.
+
+Python 3.6+ only.
+"""
+
+import sys
+import types
+import typing
+
+import pytest
+
+import attr
+
+from attr._make import _is_class_var
+from attr.exceptions import UnannotatedAttributeError
+
+
+def assert_init_annotations(cls, **annotations):
+ """
+ Assert cls.__init__ has the correct annotations.
+ """
+ __tracebackhide__ = True
+
+ annotations["return"] = type(None)
+
+ assert annotations == typing.get_type_hints(cls.__init__)
+
+
+class TestAnnotations:
+ """
+ Tests for types derived from variable annotations (PEP-526).
+ """
+
+ def test_basic_annotations(self):
+ """
+ Sets the `Attribute.type` attr from basic type annotations.
+ """
+
+ @attr.resolve_types
+ @attr.s
+ class C:
+ x: int = attr.ib()
+ y = attr.ib(type=str)
+ z = attr.ib()
+
+ assert int is attr.fields(C).x.type
+ assert str is attr.fields(C).y.type
+ assert None is attr.fields(C).z.type
+ assert_init_annotations(C, x=int, y=str)
+
+ def test_catches_basic_type_conflict(self):
+ """
+ Raises ValueError if type is specified both ways.
+ """
+ with pytest.raises(ValueError) as e:
+
+ @attr.s
+ class C:
+ x: int = attr.ib(type=int)
+
+ assert (
+ "Type annotation and type argument cannot both be present",
+ ) == e.value.args
+
+ def test_typing_annotations(self):
+ """
+ Sets the `Attribute.type` attr from typing annotations.
+ """
+
+ @attr.resolve_types
+ @attr.s
+ class C:
+ x: typing.List[int] = attr.ib()
+ y = attr.ib(type=typing.Optional[str])
+
+ assert typing.List[int] is attr.fields(C).x.type
+ assert typing.Optional[str] is attr.fields(C).y.type
+ assert_init_annotations(C, x=typing.List[int], y=typing.Optional[str])
+
+ def test_only_attrs_annotations_collected(self):
+ """
+ Annotations that aren't set to an attr.ib are ignored.
+ """
+
+ @attr.resolve_types
+ @attr.s
+ class C:
+ x: typing.List[int] = attr.ib()
+ y: int
+
+ assert 1 == len(attr.fields(C))
+ assert_init_annotations(C, x=typing.List[int])
+
+ @pytest.mark.parametrize("slots", [True, False])
+ def test_auto_attribs(self, slots):
+ """
+ If *auto_attribs* is True, bare annotations are collected too.
+ Defaults work and class variables are ignored.
+ """
+
+ @attr.s(auto_attribs=True, slots=slots)
+ class C:
+ cls_var: typing.ClassVar[int] = 23
+ a: int
+ x: typing.List[int] = attr.Factory(list)
+ y: int = 2
+ z: int = attr.ib(default=3)
+ foo: typing.Any = None
+
+ i = C(42)
+ assert "C(a=42, x=[], y=2, z=3, foo=None)" == repr(i)
+
+ attr_names = set(a.name for a in C.__attrs_attrs__)
+ assert "a" in attr_names # just double check that the set works
+ assert "cls_var" not in attr_names
+
+ attr.resolve_types(C)
+
+ assert int == attr.fields(C).a.type
+
+ assert attr.Factory(list) == attr.fields(C).x.default
+ assert typing.List[int] == attr.fields(C).x.type
+
+ assert int == attr.fields(C).y.type
+ assert 2 == attr.fields(C).y.default
+
+ assert int == attr.fields(C).z.type
+
+ assert typing.Any == attr.fields(C).foo.type
+
+ # Class body is clean.
+ if slots is False:
+ with pytest.raises(AttributeError):
+ C.y
+
+ assert 2 == i.y
+ else:
+ assert isinstance(C.y, types.MemberDescriptorType)
+
+ i.y = 23
+ assert 23 == i.y
+
+ assert_init_annotations(
+ C,
+ a=int,
+ x=typing.List[int],
+ y=int,
+ z=int,
+ foo=typing.Optional[typing.Any],
+ )
+
+ @pytest.mark.parametrize("slots", [True, False])
+ def test_auto_attribs_unannotated(self, slots):
+ """
+ Unannotated `attr.ib`s raise an error.
+ """
+ with pytest.raises(UnannotatedAttributeError) as e:
+
+ @attr.s(slots=slots, auto_attribs=True)
+ class C:
+ v = attr.ib()
+ x: int
+ y = attr.ib()
+ z: str
+
+ assert (
+ "The following `attr.ib`s lack a type annotation: v, y.",
+ ) == e.value.args
+
+ @pytest.mark.parametrize("slots", [True, False])
+ def test_auto_attribs_subclassing(self, slots):
+ """
+        Attributes from base classes are inherited; it doesn't matter whether
+        the subclass has annotations or not.
+
+ Ref #291
+ """
+
+ @attr.resolve_types
+ @attr.s(slots=slots, auto_attribs=True)
+ class A:
+ a: int = 1
+
+ @attr.resolve_types
+ @attr.s(slots=slots, auto_attribs=True)
+ class B(A):
+ b: int = 2
+
+ @attr.resolve_types
+ @attr.s(slots=slots, auto_attribs=True)
+ class C(A):
+ pass
+
+ assert "B(a=1, b=2)" == repr(B())
+ assert "C(a=1)" == repr(C())
+ assert_init_annotations(A, a=int)
+ assert_init_annotations(B, a=int, b=int)
+ assert_init_annotations(C, a=int)
+
+ def test_converter_annotations(self):
+ """
+ An unannotated attribute with an annotated converter gets its
+ annotation from the converter.
+ """
+
+ def int2str(x: int) -> str:
+ return str(x)
+
+ @attr.s
+ class A:
+ a = attr.ib(converter=int2str)
+
+ assert_init_annotations(A, a=int)
+
+ def int2str_(x: int, y: str = ""):
+ return str(x)
+
+ @attr.s
+ class A:
+ a = attr.ib(converter=int2str_)
+
+ assert_init_annotations(A, a=int)
+
+ def test_converter_attrib_annotations(self):
+ """
+ If a converter is provided, an explicit type annotation has no
+ effect on an attribute's type annotation.
+ """
+
+ def int2str(x: int) -> str:
+ return str(x)
+
+ @attr.s
+ class A:
+ a: str = attr.ib(converter=int2str)
+ b = attr.ib(converter=int2str, type=str)
+
+ assert_init_annotations(A, a=int, b=int)
+
+ def test_non_introspectable_converter(self):
+ """
+ A non-introspectable converter doesn't cause a crash.
+ """
+
+ @attr.s
+ class A:
+ a = attr.ib(converter=print)
+
+ def test_nullary_converter(self):
+ """
+        A converter with no arguments doesn't cause a crash.
+ """
+
+ def noop():
+ pass
+
+ @attr.s
+ class A:
+ a = attr.ib(converter=noop)
+
+ assert A.__init__.__annotations__ == {"return": None}
+
+ def test_pipe(self):
+ """
+ pipe() uses the input annotation of its first argument and the
+ output annotation of its last argument.
+ """
+
+ def int2str(x: int) -> str:
+ return str(x)
+
+ def strlen(y: str) -> int:
+ return len(y)
+
+ def identity(z):
+ return z
+
+ assert attr.converters.pipe(int2str).__annotations__ == {
+ "val": int,
+ "return": str,
+ }
+ assert attr.converters.pipe(int2str, strlen).__annotations__ == {
+ "val": int,
+ "return": int,
+ }
+ assert attr.converters.pipe(identity, strlen).__annotations__ == {
+ "return": int
+ }
+ assert attr.converters.pipe(int2str, identity).__annotations__ == {
+ "val": int
+ }
+
+ def int2str_(x: int, y: int = 0) -> str:
+ return str(x)
+
+ assert attr.converters.pipe(int2str_).__annotations__ == {
+ "val": int,
+ "return": str,
+ }
+
+ def test_pipe_empty(self):
+ """
+ pipe() with no converters is annotated like the identity.
+ """
+
+ p = attr.converters.pipe()
+ assert "val" in p.__annotations__
+ t = p.__annotations__["val"]
+ assert isinstance(t, typing.TypeVar)
+ assert p.__annotations__ == {"val": t, "return": t}
+
+ def test_pipe_non_introspectable(self):
+ """
+ pipe() doesn't crash when passed a non-introspectable converter.
+ """
+
+ assert attr.converters.pipe(print).__annotations__ == {}
+
+ def test_pipe_nullary(self):
+ """
+ pipe() doesn't crash when passed a nullary converter.
+ """
+
+ def noop():
+ pass
+
+ assert attr.converters.pipe(noop).__annotations__ == {}
+
+ def test_optional(self):
+ """
+ optional() uses the annotations of the converter it wraps.
+ """
+
+ def int2str(x: int) -> str:
+ return str(x)
+
+ def int_identity(x: int):
+ return x
+
+ def strify(x) -> str:
+ return str(x)
+
+ def identity(x):
+ return x
+
+ assert attr.converters.optional(int2str).__annotations__ == {
+ "val": typing.Optional[int],
+ "return": typing.Optional[str],
+ }
+ assert attr.converters.optional(int_identity).__annotations__ == {
+ "val": typing.Optional[int]
+ }
+ assert attr.converters.optional(strify).__annotations__ == {
+ "return": typing.Optional[str]
+ }
+ assert attr.converters.optional(identity).__annotations__ == {}
+
+ def int2str_(x: int, y: int = 0) -> str:
+ return str(x)
+
+ assert attr.converters.optional(int2str_).__annotations__ == {
+ "val": typing.Optional[int],
+ "return": typing.Optional[str],
+ }
+
+ def test_optional_non_introspectable(self):
+ """
+ optional() doesn't crash when passed a non-introspectable
+ converter.
+ """
+
+ assert attr.converters.optional(print).__annotations__ == {}
+
+ def test_optional_nullary(self):
+ """
+ optional() doesn't crash when passed a nullary converter.
+ """
+
+ def noop():
+ pass
+
+ assert attr.converters.optional(noop).__annotations__ == {}
+
+ @pytest.mark.xfail(
+ sys.version_info[:2] == (3, 6), reason="Does not work on 3.6."
+ )
+ @pytest.mark.parametrize("slots", [True, False])
+ def test_annotations_strings(self, slots):
+ """
+ String annotations are passed into __init__ as is.
+
+ It fails on 3.6 due to a bug in Python.
+ """
+ import typing as t
+
+ from typing import ClassVar
+
+ @attr.s(auto_attribs=True, slots=slots)
+ class C:
+ cls_var1: "typing.ClassVar[int]" = 23
+ cls_var2: "ClassVar[int]" = 23
+ cls_var3: "t.ClassVar[int]" = 23
+ a: "int"
+ x: "typing.List[int]" = attr.Factory(list)
+ y: "int" = 2
+ z: "int" = attr.ib(default=3)
+ foo: "typing.Any" = None
+
+ attr.resolve_types(C, locals(), globals())
+
+ assert_init_annotations(
+ C,
+ a=int,
+ x=typing.List[int],
+ y=int,
+ z=int,
+ foo=typing.Optional[typing.Any],
+ )
+
+ @pytest.mark.parametrize("slots", [True, False])
+ def test_typing_extensions_classvar(self, slots):
+ """
+ If ClassVar is coming from typing_extensions, it is recognized too.
+ """
+
+ @attr.s(auto_attribs=True, slots=slots)
+ class C:
+ cls_var: "typing_extensions.ClassVar" = 23 # noqa
+
+ assert_init_annotations(C)
+
+ def test_keyword_only_auto_attribs(self):
+ """
+ `kw_only` propagates to attributes defined via `auto_attribs`.
+ """
+
+ @attr.s(auto_attribs=True, kw_only=True)
+ class C:
+ x: int
+ y: int
+
+ with pytest.raises(TypeError):
+ C(0, 1)
+
+ with pytest.raises(TypeError):
+ C(x=0)
+
+ c = C(x=0, y=1)
+
+ assert c.x == 0
+ assert c.y == 1
+
+ def test_base_class_variable(self):
+ """
+        A base class's class variables can be overridden with an attribute
+        without resorting to an explicit `attr.ib()`.
+ """
+
+ class Base:
+ x: int = 42
+
+ @attr.s(auto_attribs=True)
+ class C(Base):
+ x: int
+
+ assert 1 == C(1).x
+
+ def test_removes_none_too(self):
+ """
+ Regression test for #523: make sure defaults that are set to None are
+ removed too.
+ """
+
+ @attr.s(auto_attribs=True)
+ class C:
+ x: int = 42
+ y: typing.Any = None
+
+ with pytest.raises(AttributeError):
+ C.x
+
+ with pytest.raises(AttributeError):
+ C.y
+
+ def test_non_comparable_defaults(self):
+ """
+ Regression test for #585: objects that are not directly comparable
+ (for example numpy arrays) would cause a crash when used as
+ default values of an attrs auto-attrib class.
+ """
+
+ class NonComparable:
+ def __eq__(self, other):
+ raise ValueError
+
+ @attr.s(auto_attribs=True)
+ class C:
+ x: typing.Any = NonComparable()
+
+ def test_basic_resolve(self):
+ """
+ Resolve the `Attribute.type` attr from basic type annotations.
+ Unannotated types are ignored.
+ """
+
+ @attr.s
+ class C:
+ x: "int" = attr.ib()
+ y = attr.ib(type=str)
+ z = attr.ib()
+
+ attr.resolve_types(C)
+
+ assert int is attr.fields(C).x.type
+ assert str is attr.fields(C).y.type
+ assert None is attr.fields(C).z.type
+
+ @pytest.mark.parametrize("slots", [True, False])
+ def test_resolve_types_auto_attrib(self, slots):
+ """
+ Types can be resolved even when strings are involved.
+ """
+
+ @attr.s(slots=slots, auto_attribs=True)
+ class A:
+ a: typing.List[int]
+ b: typing.List["int"]
+ c: "typing.List[int]"
+
+ # Note: I don't have to pass globals and locals here because
+ # int is a builtin and will be available in any scope.
+ attr.resolve_types(A)
+
+ assert typing.List[int] == attr.fields(A).a.type
+ assert typing.List[int] == attr.fields(A).b.type
+ assert typing.List[int] == attr.fields(A).c.type
+
+ @pytest.mark.parametrize("slots", [True, False])
+ def test_resolve_types_decorator(self, slots):
+ """
+ Types can be resolved using it as a decorator.
+ """
+
+ @attr.resolve_types
+ @attr.s(slots=slots, auto_attribs=True)
+ class A:
+ a: typing.List[int]
+ b: typing.List["int"]
+ c: "typing.List[int]"
+
+ assert typing.List[int] == attr.fields(A).a.type
+ assert typing.List[int] == attr.fields(A).b.type
+ assert typing.List[int] == attr.fields(A).c.type
+
+ @pytest.mark.parametrize("slots", [True, False])
+ def test_self_reference(self, slots):
+ """
+        Quoted references to the class itself can be resolved.
+ """
+
+ @attr.s(slots=slots, auto_attribs=True)
+ class A:
+ a: "A"
+ b: typing.Optional["A"] # noqa: will resolve below
+
+ attr.resolve_types(A, globals(), locals())
+
+ assert A == attr.fields(A).a.type
+ assert typing.Optional[A] == attr.fields(A).b.type
+
+ @pytest.mark.parametrize("slots", [True, False])
+ def test_forward_reference(self, slots):
+ """
+ Forward references can be resolved.
+ """
+
+ @attr.s(slots=slots, auto_attribs=True)
+ class A:
+ a: typing.List["B"] # noqa: will resolve below
+
+ @attr.s(slots=slots, auto_attribs=True)
+ class B:
+ a: A
+
+ attr.resolve_types(A, globals(), locals())
+ attr.resolve_types(B, globals(), locals())
+
+ assert typing.List[B] == attr.fields(A).a.type
+ assert A == attr.fields(B).a.type
+
+ assert typing.List[B] == attr.fields(A).a.type
+ assert A == attr.fields(B).a.type
+
+ def test_init_type_hints(self):
+ """
+ Forward references in __init__ can be automatically resolved.
+ """
+
+ @attr.s
+ class C:
+ x = attr.ib(type="typing.List[int]")
+
+ assert_init_annotations(C, x=typing.List[int])
+
+ def test_init_type_hints_fake_module(self):
+ """
+        If you somehow set __module__ to something that doesn't exist,
+        you'll lose __init__ resolution.
+ """
+
+ class C:
+ x = attr.ib(type="typing.List[int]")
+
+ C.__module__ = "totally fake"
+ C = attr.s(C)
+
+ with pytest.raises(NameError):
+ typing.get_type_hints(C.__init__)
+
+ def test_inheritance(self):
+ """
+ Subclasses can be resolved after the parent is resolved.
+ """
+
+ @attr.define()
+ class A:
+ n: "int"
+
+ @attr.define()
+ class B(A):
+ pass
+
+ attr.resolve_types(A)
+ attr.resolve_types(B)
+
+ assert int == attr.fields(A).n.type
+ assert int == attr.fields(B).n.type
+
+ def test_resolve_twice(self):
+ """
+ You can call resolve_types as many times as you like.
+ This test is here mostly for coverage.
+ """
+
+ @attr.define()
+ class A:
+ n: "int"
+
+ attr.resolve_types(A)
+ assert int == attr.fields(A).n.type
+ attr.resolve_types(A)
+ assert int == attr.fields(A).n.type
+
+
+@pytest.mark.parametrize(
+ "annot",
+ [
+ typing.ClassVar,
+ "typing.ClassVar",
+ "'typing.ClassVar[dict]'",
+ "t.ClassVar[int]",
+ ],
+)
+def test_is_class_var(annot):
+ """
+ ClassVars are detected, even if they're a string or quoted.
+ """
+ assert _is_class_var(annot)
diff --git a/testing/web-platform/tests/tools/third_party/attrs/tests/test_cmp.py b/testing/web-platform/tests/tools/third_party/attrs/tests/test_cmp.py
new file mode 100644
index 0000000000..ec2c687489
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/tests/test_cmp.py
@@ -0,0 +1,510 @@
+# SPDX-License-Identifier: MIT
+
+"""
+Tests for methods from `attr._cmp`.
+"""
+
+from __future__ import absolute_import, division, print_function
+
+import pytest
+
+from attr._cmp import cmp_using
+from attr._compat import PY2
+
+
+# Test parameters.
+EqCSameType = cmp_using(eq=lambda a, b: a == b, class_name="EqCSameType")
+PartialOrderCSameType = cmp_using(
+ eq=lambda a, b: a == b,
+ lt=lambda a, b: a < b,
+ class_name="PartialOrderCSameType",
+)
+FullOrderCSameType = cmp_using(
+ eq=lambda a, b: a == b,
+ lt=lambda a, b: a < b,
+ le=lambda a, b: a <= b,
+ gt=lambda a, b: a > b,
+ ge=lambda a, b: a >= b,
+ class_name="FullOrderCSameType",
+)
+
+EqCAnyType = cmp_using(
+ eq=lambda a, b: a == b, require_same_type=False, class_name="EqCAnyType"
+)
+PartialOrderCAnyType = cmp_using(
+ eq=lambda a, b: a == b,
+ lt=lambda a, b: a < b,
+ require_same_type=False,
+ class_name="PartialOrderCAnyType",
+)
+
+
+eq_data = [
+ (EqCSameType, True),
+ (EqCAnyType, False),
+]
+
+order_data = [
+ (PartialOrderCSameType, True),
+ (PartialOrderCAnyType, False),
+ (FullOrderCSameType, True),
+]
+
+eq_ids = [c[0].__name__ for c in eq_data]
+order_ids = [c[0].__name__ for c in order_data]
+
+cmp_data = eq_data + order_data
+cmp_ids = eq_ids + order_ids
+
+
+class TestEqOrder(object):
+ """
+ Tests for eq and order related methods.
+ """
+
+ #########
+ # eq
+ #########
+ @pytest.mark.parametrize("cls, requires_same_type", cmp_data, ids=cmp_ids)
+ def test_equal_same_type(self, cls, requires_same_type):
+ """
+ Equal objects are detected as equal.
+ """
+ assert cls(1) == cls(1)
+ assert not (cls(1) != cls(1))
+
+ @pytest.mark.parametrize("cls, requires_same_type", cmp_data, ids=cmp_ids)
+ def test_unequal_same_type(self, cls, requires_same_type):
+ """
+ Unequal objects of correct type are detected as unequal.
+ """
+ assert cls(1) != cls(2)
+ assert not (cls(1) == cls(2))
+
+ @pytest.mark.parametrize("cls, requires_same_type", cmp_data, ids=cmp_ids)
+ def test_equal_different_type(self, cls, requires_same_type):
+ """
+ Equal values of different types are detected appropriately.
+ """
+ assert (cls(1) == cls(1.0)) == (not requires_same_type)
+ assert not (cls(1) != cls(1.0)) == (not requires_same_type)
+
+ #########
+ # lt
+ #########
+ @pytest.mark.skipif(PY2, reason="PY2 does not raise TypeError")
+ @pytest.mark.parametrize("cls, requires_same_type", eq_data, ids=eq_ids)
+ def test_lt_unorderable(self, cls, requires_same_type):
+ """
+ TypeError is raised if class does not implement __lt__.
+ """
+ with pytest.raises(TypeError):
+ cls(1) < cls(2)
+
+ @pytest.mark.parametrize(
+ "cls, requires_same_type", order_data, ids=order_ids
+ )
+ def test_lt_same_type(self, cls, requires_same_type):
+ """
+ Less-than objects are detected appropriately.
+ """
+ assert cls(1) < cls(2)
+ assert not (cls(2) < cls(1))
+
+ @pytest.mark.parametrize(
+ "cls, requires_same_type", order_data, ids=order_ids
+ )
+ def test_not_lt_same_type(self, cls, requires_same_type):
+ """
+ Not less-than objects are detected appropriately.
+ """
+ assert cls(2) >= cls(1)
+ assert not (cls(1) >= cls(2))
+
+ @pytest.mark.parametrize(
+ "cls, requires_same_type", order_data, ids=order_ids
+ )
+ def test_lt_different_type(self, cls, requires_same_type):
+ """
+ Less-than values of different types are detected appropriately.
+ """
+ if requires_same_type:
+ # Unlike __eq__, NotImplemented will cause an exception to be
+ # raised from __lt__.
+ if not PY2:
+ with pytest.raises(TypeError):
+ cls(1) < cls(2.0)
+ else:
+ assert cls(1) < cls(2.0)
+ assert not (cls(2) < cls(1.0))
+
+ #########
+ # le
+ #########
+ @pytest.mark.skipif(PY2, reason="PY2 does not raise TypeError")
+ @pytest.mark.parametrize("cls, requires_same_type", eq_data, ids=eq_ids)
+ def test_le_unorderable(self, cls, requires_same_type):
+ """
+ TypeError is raised if class does not implement __le__.
+ """
+ with pytest.raises(TypeError):
+ cls(1) <= cls(2)
+
+ @pytest.mark.parametrize(
+ "cls, requires_same_type", order_data, ids=order_ids
+ )
+ def test_le_same_type(self, cls, requires_same_type):
+ """
+ Less-than-or-equal objects are detected appropriately.
+ """
+ assert cls(1) <= cls(1)
+ assert cls(1) <= cls(2)
+ assert not (cls(2) <= cls(1))
+
+ @pytest.mark.parametrize(
+ "cls, requires_same_type", order_data, ids=order_ids
+ )
+ def test_not_le_same_type(self, cls, requires_same_type):
+ """
+ Not less-than-or-equal objects are detected appropriately.
+ """
+ assert cls(2) > cls(1)
+ assert not (cls(1) > cls(1))
+ assert not (cls(1) > cls(2))
+
+ @pytest.mark.parametrize(
+ "cls, requires_same_type", order_data, ids=order_ids
+ )
+ def test_le_different_type(self, cls, requires_same_type):
+ """
+ Less-than-or-equal values of diff. types are detected appropriately.
+ """
+ if requires_same_type:
+ # Unlike __eq__, NotImplemented will cause an exception to be
+ # raised from __le__.
+ if not PY2:
+ with pytest.raises(TypeError):
+ cls(1) <= cls(2.0)
+ else:
+ assert cls(1) <= cls(2.0)
+ assert cls(1) <= cls(1.0)
+ assert not (cls(2) <= cls(1.0))
+
+ #########
+ # gt
+ #########
+ @pytest.mark.skipif(PY2, reason="PY2 does not raise TypeError")
+ @pytest.mark.parametrize("cls, requires_same_type", eq_data, ids=eq_ids)
+ def test_gt_unorderable(self, cls, requires_same_type):
+ """
+ TypeError is raised if class does not implement __gt__.
+ """
+ with pytest.raises(TypeError):
+ cls(2) > cls(1)
+
+ @pytest.mark.parametrize(
+ "cls, requires_same_type", order_data, ids=order_ids
+ )
+ def test_gt_same_type(self, cls, requires_same_type):
+ """
+ Greater-than objects are detected appropriately.
+ """
+ assert cls(2) > cls(1)
+ assert not (cls(1) > cls(2))
+
+ @pytest.mark.parametrize(
+ "cls, requires_same_type", order_data, ids=order_ids
+ )
+ def test_not_gt_same_type(self, cls, requires_same_type):
+ """
+ Not greater-than objects are detected appropriately.
+ """
+ assert cls(1) <= cls(2)
+ assert not (cls(2) <= cls(1))
+
+ @pytest.mark.parametrize(
+ "cls, requires_same_type", order_data, ids=order_ids
+ )
+ def test_gt_different_type(self, cls, requires_same_type):
+ """
+ Greater-than values of different types are detected appropriately.
+ """
+ if requires_same_type:
+ # Unlike __eq__, NotImplemented will cause an exception to be
+ # raised from __gt__.
+ if not PY2:
+ with pytest.raises(TypeError):
+ cls(2) > cls(1.0)
+ else:
+ assert cls(2) > cls(1.0)
+ assert not (cls(1) > cls(2.0))
+
+ #########
+ # ge
+ #########
+ @pytest.mark.skipif(PY2, reason="PY2 does not raise TypeError")
+ @pytest.mark.parametrize("cls, requires_same_type", eq_data, ids=eq_ids)
+ def test_ge_unorderable(self, cls, requires_same_type):
+ """
+ TypeError is raised if class does not implement __ge__.
+ """
+ with pytest.raises(TypeError):
+ cls(2) >= cls(1)
+
+ @pytest.mark.parametrize(
+ "cls, requires_same_type", order_data, ids=order_ids
+ )
+ def test_ge_same_type(self, cls, requires_same_type):
+ """
+ Greater-than-or-equal objects are detected appropriately.
+ """
+ assert cls(1) >= cls(1)
+ assert cls(2) >= cls(1)
+ assert not (cls(1) >= cls(2))
+
+ @pytest.mark.parametrize(
+ "cls, requires_same_type", order_data, ids=order_ids
+ )
+ def test_not_ge_same_type(self, cls, requires_same_type):
+ """
+ Not greater-than-or-equal objects are detected appropriately.
+ """
+ assert cls(1) < cls(2)
+ assert not (cls(1) < cls(1))
+ assert not (cls(2) < cls(1))
+
+ @pytest.mark.parametrize(
+ "cls, requires_same_type", order_data, ids=order_ids
+ )
+ def test_ge_different_type(self, cls, requires_same_type):
+ """
+ Greater-than-or-equal values of diff. types are detected appropriately.
+ """
+ if requires_same_type:
+ # Unlike __eq__, NotImplemented will cause an exception to be
+ # raised from __ge__.
+ if not PY2:
+ with pytest.raises(TypeError):
+ cls(2) >= cls(1.0)
+ else:
+ assert cls(2) >= cls(2.0)
+ assert cls(2) >= cls(1.0)
+ assert not (cls(1) >= cls(2.0))
+
+
+class TestDundersUnnamedClass(object):
+ """
+ Tests for dunder attributes of unnamed classes.
+ """
+
+ cls = cmp_using(eq=lambda a, b: a == b)
+
+ def test_class(self):
+ """
+ Class name and qualified name should be well behaved.
+ """
+ assert self.cls.__name__ == "Comparable"
+ if not PY2:
+ assert self.cls.__qualname__ == "Comparable"
+
+ def test_eq(self):
+ """
+ __eq__ docstring and qualified name should be well behaved.
+ """
+ method = self.cls.__eq__
+ assert method.__doc__.strip() == "Return a == b. Computed by attrs."
+ assert method.__name__ == "__eq__"
+
+ def test_ne(self):
+ """
+ __ne__ docstring and qualified name should be well behaved.
+ """
+ method = self.cls.__ne__
+ assert method.__doc__.strip() == (
+ "Check equality and either forward a NotImplemented or\n"
+ " return the result negated."
+ )
+ assert method.__name__ == "__ne__"
+
+
+class TestTotalOrderingException(object):
+ """
+ Test for exceptions related to total ordering.
+ """
+
+ def test_eq_must_specified(self):
+ """
+ `total_ordering` requires `__eq__` to be specified.
+ """
+ with pytest.raises(ValueError) as ei:
+ cmp_using(lt=lambda a, b: a < b)
+
+ assert ei.value.args[0] == (
+ "eq must be define is order to complete ordering from "
+ "lt, le, gt, ge."
+ )
+
+
+class TestNotImplementedIsPropagated(object):
+ """
+ Test related to functions that return NotImplemented.
+ """
+
+ def test_not_implemented_is_propagated(self):
+ """
+ If the comparison function returns NotImplemented,
+ the dunder method should too.
+ """
+ C = cmp_using(eq=lambda a, b: NotImplemented if a == 1 else a == b)
+
+ assert C(2) == C(2)
+ assert C(1) != C(1)
+
+
+class TestDundersPartialOrdering(object):
+ """
+ Tests for dunder attributes of classes with partial ordering.
+ """
+
+ cls = PartialOrderCSameType
+
+ def test_class(self):
+ """
+ Class name and qualified name should be well behaved.
+ """
+ assert self.cls.__name__ == "PartialOrderCSameType"
+ if not PY2:
+ assert self.cls.__qualname__ == "PartialOrderCSameType"
+
+ def test_eq(self):
+ """
+ __eq__ docstring and qualified name should be well behaved.
+ """
+ method = self.cls.__eq__
+ assert method.__doc__.strip() == "Return a == b. Computed by attrs."
+ assert method.__name__ == "__eq__"
+
+ def test_ne(self):
+ """
+ __ne__ docstring and qualified name should be well behaved.
+ """
+ method = self.cls.__ne__
+ assert method.__doc__.strip() == (
+ "Check equality and either forward a NotImplemented or\n"
+ " return the result negated."
+ )
+ assert method.__name__ == "__ne__"
+
+ def test_lt(self):
+ """
+ __lt__ docstring and qualified name should be well behaved.
+ """
+ method = self.cls.__lt__
+ assert method.__doc__.strip() == "Return a < b. Computed by attrs."
+ assert method.__name__ == "__lt__"
+
+ def test_le(self):
+ """
+ __le__ docstring and qualified name should be well behaved.
+ """
+ method = self.cls.__le__
+ if PY2:
+ assert method.__doc__ == "x.__le__(y) <==> x<=y"
+ else:
+ assert method.__doc__.strip().startswith(
+ "Return a <= b. Computed by @total_ordering from"
+ )
+ assert method.__name__ == "__le__"
+
+ def test_gt(self):
+ """
+ __gt__ docstring and qualified name should be well behaved.
+ """
+ method = self.cls.__gt__
+ if PY2:
+ assert method.__doc__ == "x.__gt__(y) <==> x>y"
+ else:
+ assert method.__doc__.strip().startswith(
+ "Return a > b. Computed by @total_ordering from"
+ )
+ assert method.__name__ == "__gt__"
+
+ def test_ge(self):
+ """
+ __ge__ docstring and qualified name should be well behaved.
+ """
+ method = self.cls.__ge__
+ if PY2:
+ assert method.__doc__ == "x.__ge__(y) <==> x>=y"
+ else:
+ assert method.__doc__.strip().startswith(
+ "Return a >= b. Computed by @total_ordering from"
+ )
+ assert method.__name__ == "__ge__"
+
+
+class TestDundersFullOrdering(object):
+ """
+ Tests for dunder attributes of classes with full ordering.
+ """
+
+ cls = FullOrderCSameType
+
+ def test_class(self):
+ """
+ Class name and qualified name should be well behaved.
+ """
+ assert self.cls.__name__ == "FullOrderCSameType"
+ if not PY2:
+ assert self.cls.__qualname__ == "FullOrderCSameType"
+
+ def test_eq(self):
+ """
+ __eq__ docstring and qualified name should be well behaved.
+ """
+ method = self.cls.__eq__
+ assert method.__doc__.strip() == "Return a == b. Computed by attrs."
+ assert method.__name__ == "__eq__"
+
+ def test_ne(self):
+ """
+ __ne__ docstring and qualified name should be well behaved.
+ """
+ method = self.cls.__ne__
+ assert method.__doc__.strip() == (
+ "Check equality and either forward a NotImplemented or\n"
+ " return the result negated."
+ )
+ assert method.__name__ == "__ne__"
+
+ def test_lt(self):
+ """
+ __lt__ docstring and qualified name should be well behaved.
+ """
+ method = self.cls.__lt__
+ assert method.__doc__.strip() == "Return a < b. Computed by attrs."
+ assert method.__name__ == "__lt__"
+
+ def test_le(self):
+ """
+ __le__ docstring and qualified name should be well behaved.
+ """
+ method = self.cls.__le__
+ assert method.__doc__.strip() == "Return a <= b. Computed by attrs."
+ assert method.__name__ == "__le__"
+
+ def test_gt(self):
+ """
+ __gt__ docstring and qualified name should be well behaved.
+ """
+ method = self.cls.__gt__
+ assert method.__doc__.strip() == "Return a > b. Computed by attrs."
+ assert method.__name__ == "__gt__"
+
+ def test_ge(self):
+ """
+ __ge__ docstring and qualified name should be well behaved.
+ """
+ method = self.cls.__ge__
+ assert method.__doc__.strip() == "Return a >= b. Computed by attrs."
+ assert method.__name__ == "__ge__"
diff --git a/testing/web-platform/tests/tools/third_party/attrs/tests/test_compat.py b/testing/web-platform/tests/tools/third_party/attrs/tests/test_compat.py
new file mode 100644
index 0000000000..464b492f0f
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/tests/test_compat.py
@@ -0,0 +1,52 @@
+# SPDX-License-Identifier: MIT
+
+import pytest
+
+from attr._compat import metadata_proxy
+
+
+@pytest.fixture(name="mp")
+def _mp():
+ return metadata_proxy({"x": 42, "y": "foo"})
+
+
+class TestMetadataProxy:
+ """
+ Ensure properties of metadata_proxy independently of hypothesis strategies.
+ """
+
+ def test_repr(self, mp):
+ """
+ repr makes sense and is consistent across Python versions.
+ """
+ assert any(
+ [
+ "mappingproxy({'x': 42, 'y': 'foo'})" == repr(mp),
+ "mappingproxy({'y': 'foo', 'x': 42})" == repr(mp),
+ ]
+ )
+
+ def test_immutable(self, mp):
+ """
+ All mutating methods raise errors.
+ """
+ with pytest.raises(TypeError, match="not support item assignment"):
+ mp["z"] = 23
+
+ with pytest.raises(TypeError, match="not support item deletion"):
+ del mp["x"]
+
+ with pytest.raises(AttributeError, match="no attribute 'update'"):
+ mp.update({})
+
+ with pytest.raises(AttributeError, match="no attribute 'clear'"):
+ mp.clear()
+
+ with pytest.raises(AttributeError, match="no attribute 'pop'"):
+ mp.pop("x")
+
+ with pytest.raises(AttributeError, match="no attribute 'popitem'"):
+ mp.popitem()
+
+ with pytest.raises(AttributeError, match="no attribute 'setdefault'"):
+ mp.setdefault("x")
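+
+
+# A minimal usage sketch (the name below is hypothetical and not used by the
+# tests above): metadata_proxy is how attrs exposes Attribute.metadata as a
+# read-only mapping.
+_sketch_metadata = metadata_proxy({"doc": "read-only view"})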
diff --git a/testing/web-platform/tests/tools/third_party/attrs/tests/test_config.py b/testing/web-platform/tests/tools/third_party/attrs/tests/test_config.py
new file mode 100644
index 0000000000..bbf6756406
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/tests/test_config.py
@@ -0,0 +1,45 @@
+# SPDX-License-Identifier: MIT
+
+"""
+Tests for `attr._config`.
+"""
+
+from __future__ import absolute_import, division, print_function
+
+import pytest
+
+from attr import _config
+
+
+class TestConfig(object):
+ def test_default(self):
+ """
+ Run validators by default.
+ """
+ assert True is _config._run_validators
+
+ def test_set_run_validators(self):
+ """
+ Sets `_run_validators`.
+ """
+ _config.set_run_validators(False)
+ assert False is _config._run_validators
+ _config.set_run_validators(True)
+ assert True is _config._run_validators
+
+ def test_get_run_validators(self):
+ """
+ Returns `_run_validators`.
+ """
+ _config._run_validators = False
+ assert _config._run_validators is _config.get_run_validators()
+ _config._run_validators = True
+ assert _config._run_validators is _config.get_run_validators()
+
+ def test_wrong_type(self):
+ """
+ Passing anything other than a boolean raises TypeError.
+ """
+ with pytest.raises(TypeError) as e:
+ _config.set_run_validators("False")
+ assert "'run' must be bool." == e.value.args[0]
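+
+
+# A minimal illustrative sketch (the helper below is hypothetical and not used
+# by the tests above): callers typically toggle validators off around a block
+# and restore the previous state afterwards.
+def _sketch_without_validators():
+ previous = _config.get_run_validators()
+ _config.set_run_validators(False)
+ try:
+ pass # Construct objects here without running their validators.
+ finally:
+ _config.set_run_validators(previous)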
diff --git a/testing/web-platform/tests/tools/third_party/attrs/tests/test_converters.py b/testing/web-platform/tests/tools/third_party/attrs/tests/test_converters.py
new file mode 100644
index 0000000000..d0fc723eb1
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/tests/test_converters.py
@@ -0,0 +1,163 @@
+# SPDX-License-Identifier: MIT
+
+"""
+Tests for `attr.converters`.
+"""
+
+from __future__ import absolute_import
+
+import pytest
+
+import attr
+
+from attr import Factory, attrib
+from attr.converters import default_if_none, optional, pipe, to_bool
+
+
+class TestOptional(object):
+ """
+ Tests for `optional`.
+ """
+
+ def test_success_with_type(self):
+ """
+ Wrapped converter is used as usual if value is not None.
+ """
+ c = optional(int)
+
+ assert c("42") == 42
+
+ def test_success_with_none(self):
+ """
+ Nothing happens if None.
+ """
+ c = optional(int)
+
+ assert c(None) is None
+
+ def test_fail(self):
+ """
+ Propagates the underlying conversion error when conversion fails.
+ """
+ c = optional(int)
+
+ with pytest.raises(ValueError):
+ c("not_an_int")
+
+
+class TestDefaultIfNone(object):
+ def test_missing_default(self):
+ """
+ Raises TypeError if neither default nor factory have been passed.
+ """
+ with pytest.raises(TypeError, match="Must pass either"):
+ default_if_none()
+
+ def test_too_many_defaults(self):
+ """
+ Raises TypeError if both default and factory are passed.
+ """
+ with pytest.raises(TypeError, match="but not both"):
+ default_if_none(True, lambda: 42)
+
+ def test_factory_takes_self(self):
+ """
+ Raises ValueError if passed Factory has takes_self=True.
+ """
+ with pytest.raises(ValueError, match="takes_self"):
+ default_if_none(Factory(list, takes_self=True))
+
+ @pytest.mark.parametrize("val", [1, 0, True, False, "foo", "", object()])
+ def test_not_none(self, val):
+ """
+ If a non-None value is passed, it's handed down.
+ """
+ c = default_if_none("nope")
+
+ assert val == c(val)
+
+ c = default_if_none(factory=list)
+
+ assert val == c(val)
+
+ def test_none_value(self):
+ """
+ Default values are returned when a None is passed.
+ """
+ c = default_if_none(42)
+
+ assert 42 == c(None)
+
+ def test_none_factory(self):
+ """
+ Factories are used if None is passed.
+ """
+ c = default_if_none(factory=list)
+
+ assert [] == c(None)
+
+ c = default_if_none(default=Factory(list))
+
+ assert [] == c(None)
+
+
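+# A minimal usage sketch (the class below is hypothetical and not used by the
+# tests): default_if_none is typically wired up as a field converter so that
+# passing None falls back to a default, e.g. DefaultIfNoneSketch().name would
+# be "anonymous".
+@attr.s
+class DefaultIfNoneSketch(object):
+ name = attrib(default=None, converter=default_if_none("anonymous"))
+
+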
+class TestPipe(object):
+ def test_success(self):
+ """
+ Succeeds if all wrapped converters succeed.
+ """
+ c = pipe(str, to_bool, bool)
+
+ assert True is c("True") is c(True)
+
+ def test_fail(self):
+ """
+ Fails if any wrapped converter fails.
+ """
+ c = pipe(str, to_bool)
+
+ # First wrapped converter fails:
+ with pytest.raises(ValueError):
+ c(33)
+
+ # Last wrapped converter fails:
+ with pytest.raises(ValueError):
+ c("33")
+
+ def test_sugar(self):
+ """
+ `pipe(c1, c2, c3)` and `[c1, c2, c3]` are equivalent.
+ """
+
+ @attr.s
+ class C(object):
+ a1 = attrib(default="True", converter=pipe(str, to_bool, bool))
+ a2 = attrib(default=True, converter=[str, to_bool, bool])
+
+ c = C()
+ assert True is c.a1 is c.a2
+
+
+class TestToBool(object):
+ def test_unhashable(self):
+ """
+ Fails if value is unhashable.
+ """
+ with pytest.raises(ValueError, match="Cannot convert value to bool"):
+ to_bool([])
+
+ def test_truthy(self):
+ """
+ Fails if truthy values are incorrectly converted.
+ """
+ assert to_bool("t")
+ assert to_bool("yes")
+ assert to_bool("on")
+
+ def test_falsy(self):
+ """
+ Fails if falsy values are incorrectly converted.
+ """
+ assert not to_bool("f")
+ assert not to_bool("no")
+ assert not to_bool("off")
diff --git a/testing/web-platform/tests/tools/third_party/attrs/tests/test_dunders.py b/testing/web-platform/tests/tools/third_party/attrs/tests/test_dunders.py
new file mode 100644
index 0000000000..186762eb0d
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/tests/test_dunders.py
@@ -0,0 +1,1008 @@
+# SPDX-License-Identifier: MIT
+
+"""
+Tests for dunder methods from `attr._make`.
+"""
+
+from __future__ import absolute_import, division, print_function
+
+import copy
+import pickle
+
+import pytest
+
+from hypothesis import given
+from hypothesis.strategies import booleans
+
+import attr
+
+from attr._make import (
+ NOTHING,
+ Factory,
+ _add_repr,
+ _is_slot_cls,
+ _make_init,
+ _Nothing,
+ fields,
+ make_class,
+)
+from attr.validators import instance_of
+
+from .utils import simple_attr, simple_class
+
+
+EqC = simple_class(eq=True)
+EqCSlots = simple_class(eq=True, slots=True)
+OrderC = simple_class(order=True)
+OrderCSlots = simple_class(order=True, slots=True)
+ReprC = simple_class(repr=True)
+ReprCSlots = simple_class(repr=True, slots=True)
+
+
+@attr.s(eq=True)
+class EqCallableC(object):
+ a = attr.ib(eq=str.lower, order=False)
+ b = attr.ib(eq=True)
+
+
+@attr.s(eq=True, slots=True)
+class EqCallableCSlots(object):
+ a = attr.ib(eq=str.lower, order=False)
+ b = attr.ib(eq=True)
+
+
+@attr.s(order=True)
+class OrderCallableC(object):
+ a = attr.ib(eq=True, order=str.lower)
+ b = attr.ib(order=True)
+
+
+@attr.s(order=True, slots=True)
+class OrderCallableCSlots(object):
+ a = attr.ib(eq=True, order=str.lower)
+ b = attr.ib(order=True)
+
+
+# HashC is hashable by explicit definition while HashCSlots is hashable
+# implicitly. The "Cached" versions are the same, except with hash code
+# caching enabled
+HashC = simple_class(hash=True)
+HashCSlots = simple_class(hash=None, eq=True, frozen=True, slots=True)
+HashCCached = simple_class(hash=True, cache_hash=True)
+HashCSlotsCached = simple_class(
+ hash=None, eq=True, frozen=True, slots=True, cache_hash=True
+)
+# the cached hash code is stored slightly differently in this case
+# so it needs to be tested separately
+HashCFrozenNotSlotsCached = simple_class(
+ frozen=True, slots=False, hash=True, cache_hash=True
+)
+
+
+def _add_init(cls, frozen):
+ """
+ Add a __init__ method to *cls*. If *frozen* is True, make it immutable.
+
+ This function used to be part of _make. It is no longer used there, but
+ the tests for it are still useful to exercise the behavior of _make_init.
+ """
+ cls.__init__ = _make_init(
+ cls,
+ cls.__attrs_attrs__,
+ getattr(cls, "__attrs_pre_init__", False),
+ getattr(cls, "__attrs_post_init__", False),
+ frozen,
+ _is_slot_cls(cls),
+ cache_hash=False,
+ base_attr_map={},
+ is_exc=False,
+ cls_on_setattr=None,
+ attrs_init=False,
+ )
+ return cls
+
+
+class InitC(object):
+ __attrs_attrs__ = [simple_attr("a"), simple_attr("b")]
+
+
+InitC = _add_init(InitC, False)
+
+
+class TestEqOrder(object):
+ """
+ Tests for eq and order related methods.
+ """
+
+ @given(booleans())
+ def test_eq_ignore_attrib(self, slots):
+ """
+ If `eq` is False for an attribute, ignore that attribute.
+ """
+ C = make_class(
+ "C", {"a": attr.ib(eq=False), "b": attr.ib()}, slots=slots
+ )
+
+ assert C(1, 2) == C(2, 2)
+
+ @pytest.mark.parametrize("cls", [EqC, EqCSlots])
+ def test_equal(self, cls):
+ """
+ Equal objects are detected as equal.
+ """
+ assert cls(1, 2) == cls(1, 2)
+ assert not (cls(1, 2) != cls(1, 2))
+
+ @pytest.mark.parametrize("cls", [EqCallableC, EqCallableCSlots])
+ def test_equal_callable(self, cls):
+ """
+ Equal objects are detected as equal.
+ """
+ assert cls("Test", 1) == cls("test", 1)
+ assert cls("Test", 1) != cls("test", 2)
+ assert not (cls("Test", 1) != cls("test", 1))
+ assert not (cls("Test", 1) == cls("test", 2))
+
+ @pytest.mark.parametrize("cls", [EqC, EqCSlots])
+ def test_unequal_same_class(self, cls):
+ """
+ Unequal objects of correct type are detected as unequal.
+ """
+ assert cls(1, 2) != cls(2, 1)
+ assert not (cls(1, 2) == cls(2, 1))
+
+ @pytest.mark.parametrize("cls", [EqCallableC, EqCallableCSlots])
+ def test_unequal_same_class_callable(self, cls):
+ """
+ Unequal objects of correct type are detected as unequal.
+ """
+ assert cls("Test", 1) != cls("foo", 2)
+ assert not (cls("Test", 1) == cls("foo", 2))
+
+ @pytest.mark.parametrize(
+ "cls", [EqC, EqCSlots, EqCallableC, EqCallableCSlots]
+ )
+ def test_unequal_different_class(self, cls):
+ """
+ Unequal objects of different type are detected even if their attributes
+ match.
+ """
+
+ class NotEqC(object):
+ a = 1
+ b = 2
+
+ assert cls(1, 2) != NotEqC()
+ assert not (cls(1, 2) == NotEqC())
+
+ @pytest.mark.parametrize("cls", [OrderC, OrderCSlots])
+ def test_lt(self, cls):
+ """
+ __lt__ compares objects as tuples of attribute values.
+ """
+ for a, b in [
+ ((1, 2), (2, 1)),
+ ((1, 2), (1, 3)),
+ (("a", "b"), ("b", "a")),
+ ]:
+ assert cls(*a) < cls(*b)
+
+ @pytest.mark.parametrize("cls", [OrderCallableC, OrderCallableCSlots])
+ def test_lt_callable(self, cls):
+ """
+ __lt__ compares objects as tuples of attribute values.
+ """
+ # Note: "A" < "a"
+ for a, b in [
+ (("test1", 1), ("Test1", 2)),
+ (("test0", 1), ("Test1", 1)),
+ ]:
+ assert cls(*a) < cls(*b)
+
+ @pytest.mark.parametrize(
+ "cls", [OrderC, OrderCSlots, OrderCallableC, OrderCallableCSlots]
+ )
+ def test_lt_unordable(self, cls):
+ """
+ __lt__ returns NotImplemented if classes differ.
+ """
+ assert NotImplemented == (cls(1, 2).__lt__(42))
+
+ @pytest.mark.parametrize("cls", [OrderC, OrderCSlots])
+ def test_le(self, cls):
+ """
+ __le__ compares objects as tuples of attribute values.
+ """
+ for a, b in [
+ ((1, 2), (2, 1)),
+ ((1, 2), (1, 3)),
+ ((1, 1), (1, 1)),
+ (("a", "b"), ("b", "a")),
+ (("a", "b"), ("a", "b")),
+ ]:
+ assert cls(*a) <= cls(*b)
+
+ @pytest.mark.parametrize("cls", [OrderCallableC, OrderCallableCSlots])
+ def test_le_callable(self, cls):
+ """
+ __le__ compares objects as tuples of attribute values.
+ """
+ # Note: "A" < "a"
+ for a, b in [
+ (("test1", 1), ("Test1", 1)),
+ (("test1", 1), ("Test1", 2)),
+ (("test0", 1), ("Test1", 1)),
+ (("test0", 2), ("Test1", 1)),
+ ]:
+ assert cls(*a) <= cls(*b)
+
+ @pytest.mark.parametrize(
+ "cls", [OrderC, OrderCSlots, OrderCallableC, OrderCallableCSlots]
+ )
+ def test_le_unordable(self, cls):
+ """
+ __le__ returns NotImplemented if classes differ.
+ """
+ assert NotImplemented == (cls(1, 2).__le__(42))
+
+ @pytest.mark.parametrize("cls", [OrderC, OrderCSlots])
+ def test_gt(self, cls):
+ """
+ __gt__ compares objects as tuples of attribute values.
+ """
+ for a, b in [
+ ((2, 1), (1, 2)),
+ ((1, 3), (1, 2)),
+ (("b", "a"), ("a", "b")),
+ ]:
+ assert cls(*a) > cls(*b)
+
+ @pytest.mark.parametrize("cls", [OrderCallableC, OrderCallableCSlots])
+ def test_gt_callable(self, cls):
+ """
+ __gt__ compares objects as tuples of attribute values.
+ """
+ # Note: "A" < "a"
+ for a, b in [
+ (("Test1", 2), ("test1", 1)),
+ (("Test1", 1), ("test0", 1)),
+ ]:
+ assert cls(*a) > cls(*b)
+
+ @pytest.mark.parametrize(
+ "cls", [OrderC, OrderCSlots, OrderCallableC, OrderCallableCSlots]
+ )
+ def test_gt_unordable(self, cls):
+ """
+ __gt__ returns NotImplemented if classes differ.
+ """
+ assert NotImplemented == (cls(1, 2).__gt__(42))
+
+ @pytest.mark.parametrize("cls", [OrderC, OrderCSlots])
+ def test_ge(self, cls):
+ """
+ __ge__ compares objects as tuples of attribute values.
+ """
+ for a, b in [
+ ((2, 1), (1, 2)),
+ ((1, 3), (1, 2)),
+ ((1, 1), (1, 1)),
+ (("b", "a"), ("a", "b")),
+ (("a", "b"), ("a", "b")),
+ ]:
+ assert cls(*a) >= cls(*b)
+
+ @pytest.mark.parametrize("cls", [OrderCallableC, OrderCallableCSlots])
+ def test_ge_callable(self, cls):
+ """
+ __ge__ compares objects as tuples of attribute values.
+ """
+ # Note: "A" < "a"
+ for a, b in [
+ (("Test1", 1), ("test1", 1)),
+ (("Test1", 2), ("test1", 1)),
+ (("Test1", 1), ("test0", 1)),
+ (("Test1", 1), ("test0", 2)),
+ ]:
+ assert cls(*a) >= cls(*b)
+
+ @pytest.mark.parametrize(
+ "cls", [OrderC, OrderCSlots, OrderCallableC, OrderCallableCSlots]
+ )
+ def test_ge_unordable(self, cls):
+ """
+ __ge__ returns NotImplemented if classes differ.
+ """
+ assert NotImplemented == (cls(1, 2).__ge__(42))
+
+
+class TestAddRepr(object):
+ """
+ Tests for `_add_repr`.
+ """
+
+ @pytest.mark.parametrize("slots", [True, False])
+ def test_repr(self, slots):
+ """
+ If `repr` is False, ignore that attribute.
+ """
+ C = make_class(
+ "C", {"a": attr.ib(repr=False), "b": attr.ib()}, slots=slots
+ )
+
+ assert "C(b=2)" == repr(C(1, 2))
+
+ @pytest.mark.parametrize("cls", [ReprC, ReprCSlots])
+ def test_repr_works(self, cls):
+ """
+ repr returns a sensible value.
+ """
+ assert "C(a=1, b=2)" == repr(cls(1, 2))
+
+ def test_custom_repr_works(self):
+ """
+ repr returns a sensible value for attributes with a custom repr
+ callable.
+ """
+
+ def custom_repr(value):
+ return "foo:" + str(value)
+
+ @attr.s
+ class C(object):
+ a = attr.ib(repr=custom_repr)
+
+ assert "C(a=foo:1)" == repr(C(1))
+
+ def test_infinite_recursion(self):
+ """
+ In the presence of a cyclic graph, repr will emit an ellipsis and not
+ raise an exception.
+ """
+
+ @attr.s
+ class Cycle(object):
+ value = attr.ib(default=7)
+ cycle = attr.ib(default=None)
+
+ cycle = Cycle()
+ cycle.cycle = cycle
+ assert "Cycle(value=7, cycle=...)" == repr(cycle)
+
+ def test_infinite_recursion_long_cycle(self):
+ """
+ A cyclic graph can pass through other non-attrs objects, and repr will
+ still emit an ellipsis and not raise an exception.
+ """
+
+ @attr.s
+ class LongCycle(object):
+ value = attr.ib(default=14)
+ cycle = attr.ib(default=None)
+
+ cycle = LongCycle()
+ # Ensure that the reference cycle passes through a non-attrs object.
+ # This demonstrates the need for a thread-local "global" ID tracker.
+ cycle.cycle = {"cycle": [cycle]}
+ assert "LongCycle(value=14, cycle={'cycle': [...]})" == repr(cycle)
+
+ def test_underscores(self):
+ """
+ repr does not strip underscores.
+ """
+
+ class C(object):
+ __attrs_attrs__ = [simple_attr("_x")]
+
+ C = _add_repr(C)
+ i = C()
+ i._x = 42
+
+ assert "C(_x=42)" == repr(i)
+
+ def test_repr_uninitialized_member(self):
+ """
+ repr signals unset attributes.
+ """
+ C = make_class("C", {"a": attr.ib(init=False)})
+
+ assert "C(a=NOTHING)" == repr(C())
+
+ @given(add_str=booleans(), slots=booleans())
+ def test_str(self, add_str, slots):
+ """
+ If str is True, it returns the same as repr.
+
+ This only makes sense when subclassing a class with a poor __str__
+ (like Exceptions).
+ """
+
+ @attr.s(str=add_str, slots=slots)
+ class Error(Exception):
+ x = attr.ib()
+
+ e = Error(42)
+
+ assert (str(e) == repr(e)) is add_str
+
+ def test_str_no_repr(self):
+ """
+ Raises a ValueError if repr=False and str=True.
+ """
+ with pytest.raises(ValueError) as e:
+ simple_class(repr=False, str=True)
+
+ assert (
+ "__str__ can only be generated if a __repr__ exists."
+ ) == e.value.args[0]
+
+
+# these are for use in TestAddHash.test_cache_hash_serialization
+# they need to be out here so they can be un-pickled
+@attr.attrs(hash=True, cache_hash=False)
+class HashCacheSerializationTestUncached(object):
+ foo_value = attr.ib()
+
+
+@attr.attrs(hash=True, cache_hash=True)
+class HashCacheSerializationTestCached(object):
+ foo_value = attr.ib()
+
+
+@attr.attrs(slots=True, hash=True, cache_hash=True)
+class HashCacheSerializationTestCachedSlots(object):
+ foo_value = attr.ib()
+
+
+class IncrementingHasher(object):
+ def __init__(self):
+ self.hash_value = 100
+
+ def __hash__(self):
+ rv = self.hash_value
+ self.hash_value += 1
+ return rv
+
+
+class TestAddHash(object):
+ """
+ Tests for `_add_hash`.
+ """
+
+ def test_enforces_type(self):
+ """
+ The `hash` argument to both attrs and attrib must be None, True, or
+ False.
+ """
+ exc_args = ("Invalid value for hash. Must be True, False, or None.",)
+
+ with pytest.raises(TypeError) as e:
+ make_class("C", {}, hash=1),
+
+ assert exc_args == e.value.args
+
+ with pytest.raises(TypeError) as e:
+ make_class("C", {"a": attr.ib(hash=1)}),
+
+ assert exc_args == e.value.args
+
+ def test_enforce_no_cache_hash_without_hash(self):
+ """
+ Ensure an exception is raised if caching the hash code is requested
+ but attrs is not requested to generate `__hash__`.
+ """
+ exc_args = (
+ "Invalid value for cache_hash. To use hash caching,"
+ " hashing must be either explicitly or implicitly "
+ "enabled.",
+ )
+ with pytest.raises(TypeError) as e:
+ make_class("C", {}, hash=False, cache_hash=True)
+ assert exc_args == e.value.args
+
+ # unhashable case
+ with pytest.raises(TypeError) as e:
+ make_class(
+ "C", {}, hash=None, eq=True, frozen=False, cache_hash=True
+ )
+ assert exc_args == e.value.args
+
+ def test_enforce_no_cached_hash_without_init(self):
+ """
+ Ensure exception is thrown if caching the hash code is requested
+ but attrs is not requested to generate `__init__`.
+ """
+ exc_args = (
+ "Invalid value for cache_hash. To use hash caching,"
+ " init must be True.",
+ )
+ with pytest.raises(TypeError) as e:
+ make_class("C", {}, init=False, hash=True, cache_hash=True)
+ assert exc_args == e.value.args
+
+ @given(booleans(), booleans())
+ def test_hash_attribute(self, slots, cache_hash):
+ """
+ If `hash` is False on an attribute, ignore that attribute.
+ """
+ C = make_class(
+ "C",
+ {"a": attr.ib(hash=False), "b": attr.ib()},
+ slots=slots,
+ hash=True,
+ cache_hash=cache_hash,
+ )
+
+ assert hash(C(1, 2)) == hash(C(2, 2))
+
+ @given(booleans())
+ def test_hash_attribute_mirrors_eq(self, eq):
+ """
+ If `hash` is None, the hash generation mirrors `eq`.
+ """
+ C = make_class("C", {"a": attr.ib(eq=eq)}, eq=True, frozen=True)
+
+ if eq:
+ assert C(1) != C(2)
+ assert hash(C(1)) != hash(C(2))
+ assert hash(C(1)) == hash(C(1))
+ else:
+ assert C(1) == C(2)
+ assert hash(C(1)) == hash(C(2))
+
+ @given(booleans())
+ def test_hash_mirrors_eq(self, eq):
+ """
+ If `hash` is None, the hash generation mirrors `eq`.
+ """
+ C = make_class("C", {"a": attr.ib()}, eq=eq, frozen=True)
+
+ i = C(1)
+
+ assert i == i
+ assert hash(i) == hash(i)
+
+ if eq:
+ assert C(1) == C(1)
+ assert hash(C(1)) == hash(C(1))
+ else:
+ assert C(1) != C(1)
+ assert hash(C(1)) != hash(C(1))
+
+ @pytest.mark.parametrize(
+ "cls",
+ [
+ HashC,
+ HashCSlots,
+ HashCCached,
+ HashCSlotsCached,
+ HashCFrozenNotSlotsCached,
+ ],
+ )
+ def test_hash_works(self, cls):
+ """
+ __hash__ returns different hashes for different values.
+ """
+ a = cls(1, 2)
+ b = cls(1, 1)
+ assert hash(a) != hash(b)
+ # perform the test again to test the pre-cached path through
+ # __hash__ for the cached-hash versions
+ assert hash(a) != hash(b)
+
+ def test_hash_default(self):
+ """
+ Classes are not hashable by default.
+ """
+ C = make_class("C", {})
+
+ with pytest.raises(TypeError) as e:
+ hash(C())
+
+ assert e.value.args[0] in (
+ "'C' objects are unhashable", # PyPy
+ "unhashable type: 'C'", # CPython
+ )
+
+ def test_cache_hashing(self):
+ """
+ Ensure that the hash computation is cached if and only if requested.
+ """
+
+ class HashCounter:
+ """
+ A class for testing which counts how many times its hash
+ has been requested
+ """
+
+ def __init__(self):
+ self.times_hash_called = 0
+
+ def __hash__(self):
+ self.times_hash_called += 1
+ return 12345
+
+ Uncached = make_class(
+ "Uncached",
+ {"hash_counter": attr.ib(factory=HashCounter)},
+ hash=True,
+ cache_hash=False,
+ )
+ Cached = make_class(
+ "Cached",
+ {"hash_counter": attr.ib(factory=HashCounter)},
+ hash=True,
+ cache_hash=True,
+ )
+
+ uncached_instance = Uncached()
+ cached_instance = Cached()
+
+ hash(uncached_instance)
+ hash(uncached_instance)
+ hash(cached_instance)
+ hash(cached_instance)
+
+ assert 2 == uncached_instance.hash_counter.times_hash_called
+ assert 1 == cached_instance.hash_counter.times_hash_called
+
+ @pytest.mark.parametrize("cache_hash", [True, False])
+ @pytest.mark.parametrize("frozen", [True, False])
+ @pytest.mark.parametrize("slots", [True, False])
+ def test_copy_hash_cleared(self, cache_hash, frozen, slots):
+ """
+ Test that the default hash is recalculated after a copy operation.
+ """
+
+ kwargs = dict(frozen=frozen, slots=slots, cache_hash=cache_hash)
+
+ # Give it an explicit hash if we don't have an implicit one
+ if not frozen:
+ kwargs["hash"] = True
+
+ @attr.s(**kwargs)
+ class C(object):
+ x = attr.ib()
+
+ a = C(IncrementingHasher())
+ # Ensure that any hash cache would be calculated before copy
+ orig_hash = hash(a)
+ b = copy.deepcopy(a)
+
+ if kwargs["cache_hash"]:
+ # For cache_hash classes, this call is cached
+ assert orig_hash == hash(a)
+
+ assert orig_hash != hash(b)
+
+ @pytest.mark.parametrize(
+ "klass,cached",
+ [
+ (HashCacheSerializationTestUncached, False),
+ (HashCacheSerializationTestCached, True),
+ (HashCacheSerializationTestCachedSlots, True),
+ ],
+ )
+ def test_cache_hash_serialization_hash_cleared(self, klass, cached):
+ """
+ Tests that the hash cache is cleared on deserialization to fix
+ https://github.com/python-attrs/attrs/issues/482 .
+
+ This test is intended to guard against a stale hash code surviving
+ across serialization (which may cause problems when the hash value
+ is different in different interpreters).
+ """
+
+ obj = klass(IncrementingHasher())
+ original_hash = hash(obj)
+ obj_rt = self._roundtrip_pickle(obj)
+
+ if cached:
+ assert original_hash == hash(obj)
+
+ assert original_hash != hash(obj_rt)
+
+ @pytest.mark.parametrize("frozen", [True, False])
+ def test_copy_two_arg_reduce(self, frozen):
+ """
+ If __getstate__ returns None, the tuple returned by object.__reduce__
+ won't contain the state dictionary; this test ensures that the custom
+ __reduce__ generated when cache_hash=True works in that case.
+ """
+
+ @attr.s(frozen=frozen, cache_hash=True, hash=True)
+ class C(object):
+ x = attr.ib()
+
+ def __getstate__(self):
+ return None
+
+ # By the nature of this test it doesn't really create an object that's
+ # in a valid state - it basically does the equivalent of
+ # `object.__new__(C)`, so it doesn't make much sense to assert anything
+ # about the result of the copy. This test will just check that it
+ # doesn't raise an *error*.
+ copy.deepcopy(C(1))
+
+ def _roundtrip_pickle(self, obj):
+ pickle_str = pickle.dumps(obj)
+ return pickle.loads(pickle_str)
+
+
+class TestAddInit(object):
+ """
+ Tests for `_add_init`.
+ """
+
+ @given(booleans(), booleans())
+ def test_init(self, slots, frozen):
+ """
+ If `init` is False, ignore that attribute.
+ """
+ C = make_class(
+ "C",
+ {"a": attr.ib(init=False), "b": attr.ib()},
+ slots=slots,
+ frozen=frozen,
+ )
+ with pytest.raises(TypeError) as e:
+ C(a=1, b=2)
+
+ assert e.value.args[0].endswith(
+ "__init__() got an unexpected keyword argument 'a'"
+ )
+
+ @given(booleans(), booleans())
+ def test_no_init_default(self, slots, frozen):
+ """
+ If `init` is False but a Factory is specified, don't allow passing that
+ argument but initialize it anyway.
+ """
+ C = make_class(
+ "C",
+ {
+ "_a": attr.ib(init=False, default=42),
+ "_b": attr.ib(init=False, default=Factory(list)),
+ "c": attr.ib(),
+ },
+ slots=slots,
+ frozen=frozen,
+ )
+ with pytest.raises(TypeError):
+ C(a=1, c=2)
+ with pytest.raises(TypeError):
+ C(b=1, c=2)
+
+ i = C(23)
+ assert (42, [], 23) == (i._a, i._b, i.c)
+
+ @given(booleans(), booleans())
+ def test_no_init_order(self, slots, frozen):
+ """
+ If an attribute is `init=False`, it's legal to come after a mandatory
+ attribute.
+ """
+ make_class(
+ "C",
+ {"a": attr.ib(default=Factory(list)), "b": attr.ib(init=False)},
+ slots=slots,
+ frozen=frozen,
+ )
+
+ def test_sets_attributes(self):
+ """
+ The attributes are initialized using the passed keywords.
+ """
+ obj = InitC(a=1, b=2)
+ assert 1 == obj.a
+ assert 2 == obj.b
+
+ def test_default(self):
+ """
+ If a default value is present, it's used as fallback.
+ """
+
+ class C(object):
+ __attrs_attrs__ = [
+ simple_attr(name="a", default=2),
+ simple_attr(name="b", default="hallo"),
+ simple_attr(name="c", default=None),
+ ]
+
+ C = _add_init(C, False)
+ i = C()
+ assert 2 == i.a
+ assert "hallo" == i.b
+ assert None is i.c
+
+ def test_factory(self):
+ """
+ If a default factory is present, it's used as fallback.
+ """
+
+ class D(object):
+ pass
+
+ class C(object):
+ __attrs_attrs__ = [
+ simple_attr(name="a", default=Factory(list)),
+ simple_attr(name="b", default=Factory(D)),
+ ]
+
+ C = _add_init(C, False)
+ i = C()
+
+ assert [] == i.a
+ assert isinstance(i.b, D)
+
+ def test_validator(self):
+ """
+ If a validator is passed, call it with the preliminary instance, the
+ Attribute, and the argument.
+ """
+
+ class VException(Exception):
+ pass
+
+ def raiser(*args):
+ raise VException(*args)
+
+ C = make_class("C", {"a": attr.ib("a", validator=raiser)})
+ with pytest.raises(VException) as e:
+ C(42)
+
+ assert (fields(C).a, 42) == e.value.args[1:]
+ assert isinstance(e.value.args[0], C)
+
+ def test_validator_slots(self):
+ """
+ If a validator is passed, call it with the preliminary instance, the
+ Attribute, and the argument.
+ """
+
+ class VException(Exception):
+ pass
+
+ def raiser(*args):
+ raise VException(*args)
+
+ C = make_class("C", {"a": attr.ib("a", validator=raiser)}, slots=True)
+ with pytest.raises(VException) as e:
+ C(42)
+
+ assert (fields(C)[0], 42) == e.value.args[1:]
+ assert isinstance(e.value.args[0], C)
+
+ @given(booleans())
+ def test_validator_others(self, slots):
+ """
+ Does not interfere when setting non-attrs attributes.
+ """
+ C = make_class(
+ "C", {"a": attr.ib("a", validator=instance_of(int))}, slots=slots
+ )
+ i = C(1)
+
+ assert 1 == i.a
+
+ if not slots:
+ i.b = "foo"
+ assert "foo" == i.b
+ else:
+ with pytest.raises(AttributeError):
+ i.b = "foo"
+
+ def test_underscores(self):
+ """
+ The argument names in `__init__` are without leading and trailing
+ underscores.
+ """
+
+ class C(object):
+ __attrs_attrs__ = [simple_attr("_private")]
+
+ C = _add_init(C, False)
+ i = C(private=42)
+ assert 42 == i._private
+
+
+class TestNothing(object):
+ """
+ Tests for `_Nothing`.
+ """
+
+ def test_copy(self):
+ """
+ __copy__ returns the same object.
+ """
+ n = _Nothing()
+ assert n is copy.copy(n)
+
+ def test_deepcopy(self):
+ """
+ __deepcopy__ returns the same object.
+ """
+ n = _Nothing()
+ assert n is copy.deepcopy(n)
+
+ def test_eq(self):
+ """
+ All instances are equal.
+ """
+ assert _Nothing() == _Nothing() == NOTHING
+ assert not (_Nothing() != _Nothing())
+ assert 1 != _Nothing()
+
+ def test_false(self):
+ """
+ NOTHING evaluates as falsey.
+ """
+ assert not NOTHING
+ assert False is bool(NOTHING)
+
+
+@attr.s(hash=True, order=True)
+class C(object):
+ pass
+
+
+# Keep a reference to this class before C is redefined below.
+OriginalC = C
+
+
+@attr.s(hash=True, order=True)
+class C(object):
+ pass
+
+
+CopyC = C
+
+
+@attr.s(hash=True, order=True)
+class C(object):
+ """A different class, to generate different methods."""
+
+ a = attr.ib()
+
+
+class TestFilenames(object):
+ def test_filenames(self):
+ """
+ The created dunder methods have a "consistent" filename.
+ """
+ assert (
+ OriginalC.__init__.__code__.co_filename
+ == "<attrs generated init tests.test_dunders.C>"
+ )
+ assert (
+ OriginalC.__eq__.__code__.co_filename
+ == "<attrs generated eq tests.test_dunders.C>"
+ )
+ assert (
+ OriginalC.__hash__.__code__.co_filename
+ == "<attrs generated hash tests.test_dunders.C>"
+ )
+ assert (
+ CopyC.__init__.__code__.co_filename
+ == "<attrs generated init tests.test_dunders.C>"
+ )
+ assert (
+ CopyC.__eq__.__code__.co_filename
+ == "<attrs generated eq tests.test_dunders.C>"
+ )
+ assert (
+ CopyC.__hash__.__code__.co_filename
+ == "<attrs generated hash tests.test_dunders.C>"
+ )
+ assert (
+ C.__init__.__code__.co_filename
+ == "<attrs generated init tests.test_dunders.C-1>"
+ )
+ assert (
+ C.__eq__.__code__.co_filename
+ == "<attrs generated eq tests.test_dunders.C-1>"
+ )
+ assert (
+ C.__hash__.__code__.co_filename
+ == "<attrs generated hash tests.test_dunders.C-1>"
+ )
diff --git a/testing/web-platform/tests/tools/third_party/attrs/tests/test_filters.py b/testing/web-platform/tests/tools/third_party/attrs/tests/test_filters.py
new file mode 100644
index 0000000000..d1ec24dc6c
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/tests/test_filters.py
@@ -0,0 +1,111 @@
+# SPDX-License-Identifier: MIT
+
+"""
+Tests for `attr.filters`.
+"""
+
+from __future__ import absolute_import, division, print_function
+
+import pytest
+
+import attr
+
+from attr import fields
+from attr.filters import _split_what, exclude, include
+
+
+@attr.s
+class C(object):
+ a = attr.ib()
+ b = attr.ib()
+
+
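+# A minimal usage sketch (the variable below is hypothetical and not used by
+# the tests): include/exclude are meant to be passed as the ``filter`` argument
+# of attr.asdict() / attr.astuple().
+_sketch_without_strings = attr.asdict(C(1, "x"), filter=exclude(str))
+# -> {"a": 1}
+
+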
+class TestSplitWhat(object):
+ """
+ Tests for `_split_what`.
+ """
+
+ def test_splits(self):
+ """
+ Splits correctly.
+ """
+ assert (
+ frozenset((int, str)),
+ frozenset((fields(C).a,)),
+ ) == _split_what((str, fields(C).a, int))
+
+
+class TestInclude(object):
+ """
+ Tests for `include`.
+ """
+
+ @pytest.mark.parametrize(
+ "incl,value",
+ [
+ ((int,), 42),
+ ((str,), "hello"),
+ ((str, fields(C).a), 42),
+ ((str, fields(C).b), "hello"),
+ ],
+ )
+ def test_allow(self, incl, value):
+ """
+ Return True if a class or attribute is included.
+ """
+ i = include(*incl)
+ assert i(fields(C).a, value) is True
+
+ @pytest.mark.parametrize(
+ "incl,value",
+ [
+ ((str,), 42),
+ ((int,), "hello"),
+ ((str, fields(C).b), 42),
+ ((int, fields(C).b), "hello"),
+ ],
+ )
+ def test_drop_class(self, incl, value):
+ """
+ Return False on non-included classes and attributes.
+ """
+ i = include(*incl)
+ assert i(fields(C).a, value) is False
+
+
+class TestExclude(object):
+ """
+ Tests for `exclude`.
+ """
+
+ @pytest.mark.parametrize(
+ "excl,value",
+ [
+ ((str,), 42),
+ ((int,), "hello"),
+ ((str, fields(C).b), 42),
+ ((int, fields(C).b), "hello"),
+ ],
+ )
+ def test_allow(self, excl, value):
+ """
+ Return True if class or attribute is not excluded.
+ """
+ e = exclude(*excl)
+ assert e(fields(C).a, value) is True
+
+ @pytest.mark.parametrize(
+ "excl,value",
+ [
+ ((int,), 42),
+ ((str,), "hello"),
+ ((str, fields(C).a), 42),
+ ((str, fields(C).b), "hello"),
+ ],
+ )
+ def test_drop_class(self, excl, value):
+ """
+ Return True on non-excluded classes and attributes.
+ """
+ e = exclude(*excl)
+ assert e(fields(C).a, value) is False
diff --git a/testing/web-platform/tests/tools/third_party/attrs/tests/test_funcs.py b/testing/web-platform/tests/tools/third_party/attrs/tests/test_funcs.py
new file mode 100644
index 0000000000..4490ed815a
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/tests/test_funcs.py
@@ -0,0 +1,680 @@
+# SPDX-License-Identifier: MIT
+
+"""
+Tests for `attr._funcs`.
+"""
+
+from __future__ import absolute_import, division, print_function
+
+from collections import OrderedDict
+
+import pytest
+
+from hypothesis import assume, given
+from hypothesis import strategies as st
+
+import attr
+
+from attr import asdict, assoc, astuple, evolve, fields, has
+from attr._compat import TYPE, Mapping, Sequence, ordered_dict
+from attr.exceptions import AttrsAttributeNotFoundError
+from attr.validators import instance_of
+
+from .strategies import nested_classes, simple_classes
+
+
+MAPPING_TYPES = (dict, OrderedDict)
+SEQUENCE_TYPES = (list, tuple)
+
+
+@pytest.fixture(scope="session", name="C")
+def _C():
+ """
+ Return a simple but fully featured attrs class with an x and a y attribute.
+ """
+ import attr
+
+ @attr.s
+ class C(object):
+ x = attr.ib()
+ y = attr.ib()
+
+ return C
+
+
+class TestAsDict(object):
+ """
+ Tests for `asdict`.
+ """
+
+ @given(st.sampled_from(MAPPING_TYPES))
+ def test_shallow(self, C, dict_factory):
+ """
+ Shallow asdict returns correct dict.
+ """
+ assert {"x": 1, "y": 2} == asdict(
+ C(x=1, y=2), False, dict_factory=dict_factory
+ )
+
+ @given(st.sampled_from(MAPPING_TYPES))
+ def test_recurse(self, C, dict_class):
+ """
+ Deep asdict returns correct dict.
+ """
+ assert {"x": {"x": 1, "y": 2}, "y": {"x": 3, "y": 4}} == asdict(
+ C(C(1, 2), C(3, 4)), dict_factory=dict_class
+ )
+
+ def test_nested_lists(self, C):
+ """
+ Test unstructuring deeply nested lists.
+ """
+ inner = C(1, 2)
+ outer = C([[inner]], None)
+
+ assert {"x": [[{"x": 1, "y": 2}]], "y": None} == asdict(outer)
+
+ def test_nested_dicts(self, C):
+ """
+ Test unstructuring deeply nested dictionaries.
+ """
+ inner = C(1, 2)
+ outer = C({1: {2: inner}}, None)
+
+ assert {"x": {1: {2: {"x": 1, "y": 2}}}, "y": None} == asdict(outer)
+
+ @given(nested_classes, st.sampled_from(MAPPING_TYPES))
+ def test_recurse_property(self, cls, dict_class):
+ """
+ Property tests for recursive asdict.
+ """
+ obj = cls()
+ obj_dict = asdict(obj, dict_factory=dict_class)
+
+ def assert_proper_dict_class(obj, obj_dict):
+ assert isinstance(obj_dict, dict_class)
+
+ for field in fields(obj.__class__):
+ field_val = getattr(obj, field.name)
+ if has(field_val.__class__):
+ # This field holds a class, recurse the assertions.
+ assert_proper_dict_class(field_val, obj_dict[field.name])
+ elif isinstance(field_val, Sequence):
+ dict_val = obj_dict[field.name]
+ for item, item_dict in zip(field_val, dict_val):
+ if has(item.__class__):
+ assert_proper_dict_class(item, item_dict)
+ elif isinstance(field_val, Mapping):
+ # This field holds a dictionary.
+ assert isinstance(obj_dict[field.name], dict_class)
+
+ for key, val in field_val.items():
+ if has(val.__class__):
+ assert_proper_dict_class(
+ val, obj_dict[field.name][key]
+ )
+
+ assert_proper_dict_class(obj, obj_dict)
+
+ @given(st.sampled_from(MAPPING_TYPES))
+ def test_filter(self, C, dict_factory):
+ """
+ Attributes that are supposed to be skipped are skipped.
+ """
+ assert {"x": {"x": 1}} == asdict(
+ C(C(1, 2), C(3, 4)),
+ filter=lambda a, v: a.name != "y",
+ dict_factory=dict_factory,
+ )
+
+ @given(container=st.sampled_from(SEQUENCE_TYPES))
+ def test_lists_tuples(self, container, C):
+ """
+ If recurse is True, also recurse into lists.
+ """
+ assert {
+ "x": 1,
+ "y": [{"x": 2, "y": 3}, {"x": 4, "y": 5}, "a"],
+ } == asdict(C(1, container([C(2, 3), C(4, 5), "a"])))
+
+ @given(container=st.sampled_from(SEQUENCE_TYPES))
+ def test_lists_tuples_retain_type(self, container, C):
+ """
+ If recurse and retain_collection_types are True, also recurse
+ into lists and do not convert them into plain lists.
+ """
+ assert {
+ "x": 1,
+ "y": container([{"x": 2, "y": 3}, {"x": 4, "y": 5}, "a"]),
+ } == asdict(
+ C(1, container([C(2, 3), C(4, 5), "a"])),
+ retain_collection_types=True,
+ )
+
+ @given(set_type=st.sampled_from((set, frozenset)))
+ def test_sets_no_retain(self, C, set_type):
+ """
+ Set types are converted to lists if retain_collection_types=False.
+ """
+ d = asdict(
+ C(1, set_type((1, 2, 3))),
+ retain_collection_types=False,
+ recurse=True,
+ )
+
+ assert {"x": 1, "y": [1, 2, 3]} == d
+
+ @given(st.sampled_from(MAPPING_TYPES))
+ def test_dicts(self, C, dict_factory):
+ """
+ If recurse is True, also recurse into dicts.
+ """
+ res = asdict(C(1, {"a": C(4, 5)}), dict_factory=dict_factory)
+
+ assert {"x": 1, "y": {"a": {"x": 4, "y": 5}}} == res
+ assert isinstance(res, dict_factory)
+
+ @given(simple_classes(private_attrs=False), st.sampled_from(MAPPING_TYPES))
+ def test_roundtrip(self, cls, dict_class):
+ """
+ Test dumping to dicts and back for Hypothesis-generated classes.
+
+ Private attributes don't round-trip (the attribute name is different
+ than the initializer argument).
+ """
+ instance = cls()
+ dict_instance = asdict(instance, dict_factory=dict_class)
+
+ assert isinstance(dict_instance, dict_class)
+
+ roundtrip_instance = cls(**dict_instance)
+
+ assert instance == roundtrip_instance
+
+ @given(simple_classes())
+ def test_asdict_preserve_order(self, cls):
+ """
+ Field order should be preserved when dumping to an ordered_dict.
+ """
+ instance = cls()
+ dict_instance = asdict(instance, dict_factory=ordered_dict)
+
+ assert [a.name for a in fields(cls)] == list(dict_instance.keys())
+
+ def test_retain_keys_are_tuples(self):
+ """
+ retain_collection_types also retains keys.
+ """
+
+ @attr.s
+ class A(object):
+ a = attr.ib()
+
+ instance = A({(1,): 1})
+
+ assert {"a": {(1,): 1}} == attr.asdict(
+ instance, retain_collection_types=True
+ )
+
+ def test_tuple_keys(self):
+ """
+ If a key is a collection type and retain_collection_types is False,
+ the key is serialized as a tuple.
+
+ See #646
+ """
+
+ @attr.s
+ class A(object):
+ a = attr.ib()
+
+ instance = A({(1,): 1})
+
+ assert {"a": {(1,): 1}} == attr.asdict(instance)
+
+
+class TestAsTuple(object):
+ """
+ Tests for `astuple`.
+ """
+
+ @given(st.sampled_from(SEQUENCE_TYPES))
+ def test_shallow(self, C, tuple_factory):
+ """
+ Shallow astuple returns correct tuple.
+ """
+ assert tuple_factory([1, 2]) == astuple(
+ C(x=1, y=2), False, tuple_factory=tuple_factory
+ )
+
+ @given(st.sampled_from(SEQUENCE_TYPES))
+ def test_recurse(self, C, tuple_factory):
+ """
+ Deep astuple returns correct tuple.
+ """
+ assert tuple_factory(
+ [tuple_factory([1, 2]), tuple_factory([3, 4])]
+ ) == astuple(C(C(1, 2), C(3, 4)), tuple_factory=tuple_factory)
+
+ @given(nested_classes, st.sampled_from(SEQUENCE_TYPES))
+ def test_recurse_property(self, cls, tuple_class):
+ """
+ Property tests for recursive astuple.
+ """
+ obj = cls()
+ obj_tuple = astuple(obj, tuple_factory=tuple_class)
+
+ def assert_proper_tuple_class(obj, obj_tuple):
+ assert isinstance(obj_tuple, tuple_class)
+ for index, field in enumerate(fields(obj.__class__)):
+ field_val = getattr(obj, field.name)
+ if has(field_val.__class__):
+ # This field holds a class, recurse the assertions.
+ assert_proper_tuple_class(field_val, obj_tuple[index])
+
+ assert_proper_tuple_class(obj, obj_tuple)
+
+ @given(nested_classes, st.sampled_from(SEQUENCE_TYPES))
+ def test_recurse_retain(self, cls, tuple_class):
+ """
+ Property tests for asserting collection types are retained.
+ """
+ obj = cls()
+ obj_tuple = astuple(
+ obj, tuple_factory=tuple_class, retain_collection_types=True
+ )
+
+ def assert_proper_col_class(obj, obj_tuple):
+ # Iterate over all attributes, and if they are lists or mappings
+ # in the original, assert they are the same class in the dumped.
+ for index, field in enumerate(fields(obj.__class__)):
+ field_val = getattr(obj, field.name)
+ if has(field_val.__class__):
+ # This field holds a class, recurse the assertions.
+ assert_proper_col_class(field_val, obj_tuple[index])
+ elif isinstance(field_val, (list, tuple)):
+ # This field holds a sequence of something.
+ expected_type = type(obj_tuple[index])
+ assert type(field_val) is expected_type
+ for obj_e, obj_tuple_e in zip(field_val, obj_tuple[index]):
+ if has(obj_e.__class__):
+ assert_proper_col_class(obj_e, obj_tuple_e)
+ elif isinstance(field_val, dict):
+ orig = field_val
+ tupled = obj_tuple[index]
+ assert type(orig) is type(tupled)
+ for obj_e, obj_tuple_e in zip(
+ orig.items(), tupled.items()
+ ):
+ if has(obj_e[0].__class__): # Dict key
+ assert_proper_col_class(obj_e[0], obj_tuple_e[0])
+ if has(obj_e[1].__class__): # Dict value
+ assert_proper_col_class(obj_e[1], obj_tuple_e[1])
+
+ assert_proper_col_class(obj, obj_tuple)
+
+ @given(st.sampled_from(SEQUENCE_TYPES))
+ def test_filter(self, C, tuple_factory):
+ """
+ Attributes that are supposed to be skipped are skipped.
+ """
+ assert tuple_factory([tuple_factory([1])]) == astuple(
+ C(C(1, 2), C(3, 4)),
+ filter=lambda a, v: a.name != "y",
+ tuple_factory=tuple_factory,
+ )
+
+ @given(container=st.sampled_from(SEQUENCE_TYPES))
+ def test_lists_tuples(self, container, C):
+ """
+ If recurse is True, also recurse into lists.
+ """
+ assert (1, [(2, 3), (4, 5), "a"]) == astuple(
+ C(1, container([C(2, 3), C(4, 5), "a"]))
+ )
+
+ @given(st.sampled_from(SEQUENCE_TYPES))
+ def test_dicts(self, C, tuple_factory):
+ """
+ If recurse is True, also recurse into dicts.
+ """
+ res = astuple(C(1, {"a": C(4, 5)}), tuple_factory=tuple_factory)
+ assert tuple_factory([1, {"a": tuple_factory([4, 5])}]) == res
+ assert isinstance(res, tuple_factory)
+
+ @given(container=st.sampled_from(SEQUENCE_TYPES))
+ def test_lists_tuples_retain_type(self, container, C):
+ """
+ If recurse and retain_collection_types are True, also recurse
+ into lists and do not convert them into plain lists.
+ """
+ assert (1, container([(2, 3), (4, 5), "a"])) == astuple(
+ C(1, container([C(2, 3), C(4, 5), "a"])),
+ retain_collection_types=True,
+ )
+
+ @given(container=st.sampled_from(MAPPING_TYPES))
+ def test_dicts_retain_type(self, container, C):
+ """
+ If recurse and retain_collection_types are True, also recurse
+ into mappings and do not convert them into a plain dict.
+ """
+ assert (1, container({"a": (4, 5)})) == astuple(
+ C(1, container({"a": C(4, 5)})), retain_collection_types=True
+ )
+
+ @given(simple_classes(), st.sampled_from(SEQUENCE_TYPES))
+ def test_roundtrip(self, cls, tuple_class):
+ """
+ Test dumping to tuple and back for Hypothesis-generated classes.
+ """
+ instance = cls()
+ tuple_instance = astuple(instance, tuple_factory=tuple_class)
+
+ assert isinstance(tuple_instance, tuple_class)
+
+ roundtrip_instance = cls(*tuple_instance)
+
+ assert instance == roundtrip_instance
+
+ @given(set_type=st.sampled_from((set, frozenset)))
+ def test_sets_no_retain(self, C, set_type):
+ """
+ Set types are converted to lists if retain_collection_types=False.
+ """
+ d = astuple(
+ C(1, set_type((1, 2, 3))),
+ retain_collection_types=False,
+ recurse=True,
+ )
+
+ assert (1, [1, 2, 3]) == d
+
+
+class TestHas(object):
+ """
+ Tests for `has`.
+ """
+
+ def test_positive(self, C):
+ """
+ Returns `True` on decorated classes.
+ """
+ assert has(C)
+
+ def test_positive_empty(self):
+ """
+ Returns `True` on decorated classes even if there are no attributes.
+ """
+
+ @attr.s
+ class D(object):
+ pass
+
+ assert has(D)
+
+ def test_negative(self):
+ """
+ Returns `False` on non-decorated classes.
+ """
+ assert not has(object)
+
+
+class TestAssoc(object):
+ """
+ Tests for `assoc`.
+ """
+
+ @given(slots=st.booleans(), frozen=st.booleans())
+ def test_empty(self, slots, frozen):
+ """
+ Empty classes without changes get copied.
+ """
+
+ @attr.s(slots=slots, frozen=frozen)
+ class C(object):
+ pass
+
+ i1 = C()
+ with pytest.deprecated_call():
+ i2 = assoc(i1)
+
+ assert i1 is not i2
+ assert i1 == i2
+
+ @given(simple_classes())
+ def test_no_changes(self, C):
+ """
+ No changes means a verbatim copy.
+ """
+ i1 = C()
+ with pytest.deprecated_call():
+ i2 = assoc(i1)
+
+ assert i1 is not i2
+ assert i1 == i2
+
+ @given(simple_classes(), st.data())
+ def test_change(self, C, data):
+ """
+ Changes work.
+ """
+ # Draw a subset of the attributes and change them.
+ assume(fields(C)) # Skip classes with no attributes.
+ field_names = [a.name for a in fields(C)]
+ original = C()
+ chosen_names = data.draw(st.sets(st.sampled_from(field_names)))
+ change_dict = {name: data.draw(st.integers()) for name in chosen_names}
+
+ with pytest.deprecated_call():
+ changed = assoc(original, **change_dict)
+
+ for k, v in change_dict.items():
+ assert getattr(changed, k) == v
+
+ @given(simple_classes())
+ def test_unknown(self, C):
+ """
+ Wanting to change an unknown attribute raises an
+ AttrsAttributeNotFoundError.
+ """
+ # No generated class will have a four letter attribute.
+ with pytest.raises(
+ AttrsAttributeNotFoundError
+ ) as e, pytest.deprecated_call():
+ assoc(C(), aaaa=2)
+
+ assert (
+ "aaaa is not an attrs attribute on {cls!r}.".format(cls=C),
+ ) == e.value.args
+
+ def test_frozen(self):
+ """
+ Works on frozen classes.
+ """
+
+ @attr.s(frozen=True)
+ class C(object):
+ x = attr.ib()
+ y = attr.ib()
+
+ with pytest.deprecated_call():
+ assert C(3, 2) == assoc(C(1, 2), x=3)
+
+ def test_warning(self):
+ """
+ DeprecationWarning points to the correct file.
+ """
+
+ @attr.s
+ class C(object):
+ x = attr.ib()
+
+ with pytest.warns(DeprecationWarning) as wi:
+ assert C(2) == assoc(C(1), x=2)
+
+ assert __file__ == wi.list[0].filename
+
+
+class TestEvolve(object):
+ """
+ Tests for `evolve`.
+ """
+
+ @given(slots=st.booleans(), frozen=st.booleans())
+ def test_empty(self, slots, frozen):
+ """
+ Empty classes without changes get copied.
+ """
+
+ @attr.s(slots=slots, frozen=frozen)
+ class C(object):
+ pass
+
+ i1 = C()
+ i2 = evolve(i1)
+
+ assert i1 is not i2
+ assert i1 == i2
+
+ @given(simple_classes())
+ def test_no_changes(self, C):
+ """
+ No changes means a verbatim copy.
+ """
+ i1 = C()
+ i2 = evolve(i1)
+
+ assert i1 is not i2
+ assert i1 == i2
+
+ @given(simple_classes(), st.data())
+ def test_change(self, C, data):
+ """
+ Changes work.
+ """
+ # Take the first attribute, and change it.
+ assume(fields(C)) # Skip classes with no attributes.
+ field_names = [a.name for a in fields(C)]
+ original = C()
+ chosen_names = data.draw(st.sets(st.sampled_from(field_names)))
+ # We pay special attention to private attributes: they should behave
+ # like in `__init__`.
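+ # (The leading underscore is stripped: e.g. a field named ``_x`` is
+ # passed to evolve() as ``x=...``, just as it would be to __init__.)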
+ change_dict = {
+ name.replace("_", ""): data.draw(st.integers())
+ for name in chosen_names
+ }
+ changed = evolve(original, **change_dict)
+ for name in chosen_names:
+ assert getattr(changed, name) == change_dict[name.replace("_", "")]
+
+ @given(simple_classes())
+ def test_unknown(self, C):
+ """
+ Wanting to change an unknown attribute raises a TypeError from the
+ generated initializer.
+ """
+ # No generated class will have a four letter attribute.
+ with pytest.raises(TypeError) as e:
+ evolve(C(), aaaa=2)
+
+ if hasattr(C, "__attrs_init__"):
+ expected = (
+ "__attrs_init__() got an unexpected keyword argument 'aaaa'"
+ )
+ else:
+ expected = "__init__() got an unexpected keyword argument 'aaaa'"
+
+ assert e.value.args[0].endswith(expected)
+
+ def test_validator_failure(self):
+ """
+ TypeError isn't swallowed when validation fails within evolve.
+ """
+
+ @attr.s
+ class C(object):
+ a = attr.ib(validator=instance_of(int))
+
+ with pytest.raises(TypeError) as e:
+ evolve(C(a=1), a="some string")
+ m = e.value.args[0]
+
+ assert m.startswith("'a' must be <{type} 'int'>".format(type=TYPE))
+
+ def test_private(self):
+ """
+ evolve() acts like `__init__` with regard to private attributes.
+ """
+
+ @attr.s
+ class C(object):
+ _a = attr.ib()
+
+ assert evolve(C(1), a=2)._a == 2
+
+ with pytest.raises(TypeError):
+ evolve(C(1), _a=2)
+
+ with pytest.raises(TypeError):
+ evolve(C(1), a=3, _a=2)
+
+ def test_non_init_attrs(self):
+ """
+ evolve() handles `init=False` attributes.
+ """
+
+ @attr.s
+ class C(object):
+ a = attr.ib()
+ b = attr.ib(init=False, default=0)
+
+ assert evolve(C(1), a=2).a == 2
+
+ def test_regression_attrs_classes(self):
+ """
+ evolve() can evolve fields that are instances of attrs classes.
+
+ Regression test for #804
+ """
+
+ @attr.s
+ class Cls1(object):
+ param1 = attr.ib()
+
+ @attr.s
+ class Cls2(object):
+ param2 = attr.ib()
+
+ obj2a = Cls2(param2="a")
+ obj2b = Cls2(param2="b")
+
+ obj1a = Cls1(param1=obj2a)
+
+ assert Cls1(param1=Cls2(param2="b")) == attr.evolve(
+ obj1a, param1=obj2b
+ )
+
+ def test_dicts(self):
+ """
+ evolve() can replace an attrs class instance with a dict.
+
+ See #806
+ """
+
+ @attr.s
+ class Cls1(object):
+ param1 = attr.ib()
+
+ @attr.s
+ class Cls2(object):
+ param2 = attr.ib()
+
+ obj2a = Cls2(param2="a")
+ obj2b = {"foo": 42, "param2": 42}
+
+ obj1a = Cls1(param1=obj2a)
+
+ assert Cls1({"foo": 42, "param2": 42}) == attr.evolve(
+ obj1a, param1=obj2b
+ )
diff --git a/testing/web-platform/tests/tools/third_party/attrs/tests/test_functional.py b/testing/web-platform/tests/tools/third_party/attrs/tests/test_functional.py
new file mode 100644
index 0000000000..9b6a27e2f4
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/tests/test_functional.py
@@ -0,0 +1,790 @@
+# SPDX-License-Identifier: MIT
+
+"""
+End-to-end tests.
+"""
+
+from __future__ import absolute_import, division, print_function
+
+import inspect
+import pickle
+
+from copy import deepcopy
+
+import pytest
+import six
+
+from hypothesis import assume, given
+from hypothesis.strategies import booleans
+
+import attr
+
+from attr._compat import PY2, PY36, TYPE
+from attr._make import NOTHING, Attribute
+from attr.exceptions import FrozenInstanceError
+
+from .strategies import optional_bool
+
+
+@attr.s
+class C1(object):
+ x = attr.ib(validator=attr.validators.instance_of(int))
+ y = attr.ib()
+
+
+@attr.s(slots=True)
+class C1Slots(object):
+ x = attr.ib(validator=attr.validators.instance_of(int))
+ y = attr.ib()
+
+
+foo = None
+
+
+@attr.s()
+class C2(object):
+ x = attr.ib(default=foo)
+ y = attr.ib(default=attr.Factory(list))
+
+
+@attr.s(slots=True)
+class C2Slots(object):
+ x = attr.ib(default=foo)
+ y = attr.ib(default=attr.Factory(list))
+
+
+@attr.s
+class Base(object):
+ x = attr.ib()
+
+ def meth(self):
+ return self.x
+
+
+@attr.s(slots=True)
+class BaseSlots(object):
+ x = attr.ib()
+
+ def meth(self):
+ return self.x
+
+
+@attr.s
+class Sub(Base):
+ y = attr.ib()
+
+
+@attr.s(slots=True)
+class SubSlots(BaseSlots):
+ y = attr.ib()
+
+
+@attr.s(frozen=True, slots=True)
+class Frozen(object):
+ x = attr.ib()
+
+
+@attr.s
+class SubFrozen(Frozen):
+ y = attr.ib()
+
+
+@attr.s(frozen=True, slots=False)
+class FrozenNoSlots(object):
+ x = attr.ib()
+
+
+class Meta(type):
+ pass
+
+
+@attr.s
+@six.add_metaclass(Meta)
+class WithMeta(object):
+ pass
+
+
+@attr.s(slots=True)
+@six.add_metaclass(Meta)
+class WithMetaSlots(object):
+ pass
+
+
+FromMakeClass = attr.make_class("FromMakeClass", ["x"])
+
+
+class TestFunctional(object):
+ """
+ Functional tests.
+ """
+
+ @pytest.mark.parametrize("cls", [C2, C2Slots])
+ def test_fields(self, cls):
+ """
+ `attr.fields` works.
+ """
+ assert (
+ Attribute(
+ name="x",
+ default=foo,
+ validator=None,
+ repr=True,
+ cmp=None,
+ eq=True,
+ order=True,
+ hash=None,
+ init=True,
+ inherited=False,
+ ),
+ Attribute(
+ name="y",
+ default=attr.Factory(list),
+ validator=None,
+ repr=True,
+ cmp=None,
+ eq=True,
+ order=True,
+ hash=None,
+ init=True,
+ inherited=False,
+ ),
+ ) == attr.fields(cls)
+
+ @pytest.mark.parametrize("cls", [C1, C1Slots])
+ def test_asdict(self, cls):
+ """
+ `attr.asdict` works.
+ """
+ assert {"x": 1, "y": 2} == attr.asdict(cls(x=1, y=2))
+
+ @pytest.mark.parametrize("cls", [C1, C1Slots])
+ def test_validator(self, cls):
+ """
+ `instance_of` raises `TypeError` on type mismatch.
+ """
+ with pytest.raises(TypeError) as e:
+ cls("1", 2)
+
+ # Using C1 explicitly, since slotted classes don't support this.
+ assert (
+ "'x' must be <{type} 'int'> (got '1' that is a <{type} "
+ "'str'>).".format(type=TYPE),
+ attr.fields(C1).x,
+ int,
+ "1",
+ ) == e.value.args
+
+ @given(booleans())
+ def test_renaming(self, slots):
+ """
+ Private members are renamed but only in `__init__`.
+ """
+
+ @attr.s(slots=slots)
+ class C3(object):
+ _x = attr.ib()
+
+ assert "C3(_x=1)" == repr(C3(x=1))
+
+ @given(booleans(), booleans())
+ def test_programmatic(self, slots, frozen):
+ """
+ `attr.make_class` works.
+ """
+ PC = attr.make_class("PC", ["a", "b"], slots=slots, frozen=frozen)
+
+ assert (
+ Attribute(
+ name="a",
+ default=NOTHING,
+ validator=None,
+ repr=True,
+ cmp=None,
+ eq=True,
+ order=True,
+ hash=None,
+ init=True,
+ inherited=False,
+ ),
+ Attribute(
+ name="b",
+ default=NOTHING,
+ validator=None,
+ repr=True,
+ cmp=None,
+ eq=True,
+ order=True,
+ hash=None,
+ init=True,
+ inherited=False,
+ ),
+ ) == attr.fields(PC)
+
+ @pytest.mark.parametrize("cls", [Sub, SubSlots])
+ def test_subclassing_with_extra_attrs(self, cls):
+ """
+ Subclassing (where the subclass has extra attrs) does what you'd hope
+ for.
+ """
+ obj = object()
+ i = cls(x=obj, y=2)
+ assert i.x is i.meth() is obj
+ assert i.y == 2
+ if cls is Sub:
+ assert "Sub(x={obj}, y=2)".format(obj=obj) == repr(i)
+ else:
+ assert "SubSlots(x={obj}, y=2)".format(obj=obj) == repr(i)
+
+ @pytest.mark.parametrize("base", [Base, BaseSlots])
+ def test_subclass_without_extra_attrs(self, base):
+ """
+ Subclassing (where the subclass does not have extra attrs) still
+ behaves the same as a subclass with extra attrs.
+ """
+
+ class Sub2(base):
+ pass
+
+ obj = object()
+ i = Sub2(x=obj)
+ assert i.x is i.meth() is obj
+ assert "Sub2(x={obj})".format(obj=obj) == repr(i)
+
+ @pytest.mark.parametrize(
+ "frozen_class",
+ [
+ Frozen, # has slots=True
+ attr.make_class("FrozenToo", ["x"], slots=False, frozen=True),
+ ],
+ )
+ def test_frozen_instance(self, frozen_class):
+ """
+ Frozen instances can't be modified (easily).
+ """
+ frozen = frozen_class(1)
+
+ with pytest.raises(FrozenInstanceError) as e:
+ frozen.x = 2
+
+ with pytest.raises(FrozenInstanceError) as e:
+ del frozen.x
+
+ assert e.value.args[0] == "can't set attribute"
+ assert 1 == frozen.x
+
+ @pytest.mark.parametrize(
+ "cls",
+ [
+ C1,
+ C1Slots,
+ C2,
+ C2Slots,
+ Base,
+ BaseSlots,
+ Sub,
+ SubSlots,
+ Frozen,
+ FrozenNoSlots,
+ FromMakeClass,
+ ],
+ )
+ @pytest.mark.parametrize("protocol", range(2, pickle.HIGHEST_PROTOCOL + 1))
+ def test_pickle_attributes(self, cls, protocol):
+ """
+ Pickling/un-pickling of Attribute instances works.
+ """
+ for attribute in attr.fields(cls):
+ assert attribute == pickle.loads(pickle.dumps(attribute, protocol))
+
+ @pytest.mark.parametrize(
+ "cls",
+ [
+ C1,
+ C1Slots,
+ C2,
+ C2Slots,
+ Base,
+ BaseSlots,
+ Sub,
+ SubSlots,
+ Frozen,
+ FrozenNoSlots,
+ FromMakeClass,
+ ],
+ )
+ @pytest.mark.parametrize("protocol", range(2, pickle.HIGHEST_PROTOCOL + 1))
+ def test_pickle_object(self, cls, protocol):
+ """
+ Pickle object serialization works on all kinds of attrs classes.
+ """
+ if len(attr.fields(cls)) == 2:
+ obj = cls(123, 456)
+ else:
+ obj = cls(123)
+
+ assert repr(obj) == repr(pickle.loads(pickle.dumps(obj, protocol)))
+
+ def test_subclassing_frozen_gives_frozen(self):
+ """
+ The frozen-ness of classes is inherited. Subclasses of frozen classes
+ are also frozen and can be instantiated.
+ """
+ i = SubFrozen("foo", "bar")
+
+ assert i.x == "foo"
+ assert i.y == "bar"
+
+ with pytest.raises(FrozenInstanceError):
+ i.x = "baz"
+
+ @pytest.mark.parametrize("cls", [WithMeta, WithMetaSlots])
+ def test_metaclass_preserved(self, cls):
+ """
+ Metaclass data is preserved.
+ """
+ assert Meta == type(cls)
+
+ def test_default_decorator(self):
+ """
+ Default decorator sets the default and the respective method gets
+ called.
+ """
+
+ @attr.s
+ class C(object):
+ x = attr.ib(default=1)
+ y = attr.ib()
+
+ @y.default
+ def compute(self):
+ return self.x + 1
+
+ assert C(1, 2) == C()
+
+ @pytest.mark.parametrize("slots", [True, False])
+ @pytest.mark.parametrize("frozen", [True, False])
+ @pytest.mark.parametrize("weakref_slot", [True, False])
+ def test_attrib_overwrite(self, slots, frozen, weakref_slot):
+ """
+ Subclasses can overwrite attributes of their base class.
+ """
+
+ @attr.s(slots=slots, frozen=frozen, weakref_slot=weakref_slot)
+ class SubOverwrite(Base):
+ x = attr.ib(default=attr.Factory(list))
+
+ assert SubOverwrite([]) == SubOverwrite()
+
+ def test_dict_patch_class(self):
+ """
+ dict-classes are never replaced.
+ """
+
+ class C(object):
+ x = attr.ib()
+
+ C_new = attr.s(C)
+
+ assert C_new is C
+
+ def test_hash_by_id(self):
+ """
+ With dict classes, hashing by ID is active for hash=False even on
+ Python 3. This is incorrect behavior but we have to retain it for
+ backward compatibility.
+ """
+
+ @attr.s(hash=False)
+ class HashByIDBackwardCompat(object):
+ x = attr.ib()
+
+ assert hash(HashByIDBackwardCompat(1)) != hash(
+ HashByIDBackwardCompat(1)
+ )
+
+ @attr.s(hash=False, eq=False)
+ class HashByID(object):
+ x = attr.ib()
+
+ assert hash(HashByID(1)) != hash(HashByID(1))
+
+ @attr.s(hash=True)
+ class HashByValues(object):
+ x = attr.ib()
+
+ assert hash(HashByValues(1)) == hash(HashByValues(1))
+
+ def test_handles_different_defaults(self):
+ """
+ Unhashable defaults + subclassing work.
+ """
+
+ @attr.s
+ class Unhashable(object):
+ pass
+
+ @attr.s
+ class C(object):
+ x = attr.ib(default=Unhashable())
+
+ @attr.s
+ class D(C):
+ pass
+
+ @pytest.mark.parametrize("slots", [True, False])
+ def test_hash_false_eq_false(self, slots):
+ """
+ hash=False and eq=False make a class hashable by ID.
+ """
+
+ @attr.s(hash=False, eq=False, slots=slots)
+ class C(object):
+ pass
+
+ assert hash(C()) != hash(C())
+
+ @pytest.mark.parametrize("slots", [True, False])
+ def test_eq_false(self, slots):
+ """
+ eq=False makes a class hashable by ID.
+ """
+
+ @attr.s(eq=False, slots=slots)
+ class C(object):
+ pass
+
+ # Ensure both objects live long enough such that their ids/hashes
+ # can't be recycled. Thanks to Ask Hjorth Larsen for pointing that
+ # out.
+ c1 = C()
+ c2 = C()
+
+ assert hash(c1) != hash(c2)
+
+ def test_overwrite_base(self):
+ """
+ Base classes can overwrite each other and the attributes are added
+ in the order they are defined.
+ """
+
+ @attr.s
+ class C(object):
+ c = attr.ib(default=100)
+ x = attr.ib(default=1)
+ b = attr.ib(default=23)
+
+ @attr.s
+ class D(C):
+ a = attr.ib(default=42)
+ x = attr.ib(default=2)
+ d = attr.ib(default=3.14)
+
+ @attr.s
+ class E(D):
+ y = attr.ib(default=3)
+ z = attr.ib(default=4)
+
+ assert "E(c=100, b=23, a=42, x=2, d=3.14, y=3, z=4)" == repr(E())
+
+ @pytest.mark.parametrize("base_slots", [True, False])
+ @pytest.mark.parametrize("sub_slots", [True, False])
+ @pytest.mark.parametrize("base_frozen", [True, False])
+ @pytest.mark.parametrize("sub_frozen", [True, False])
+ @pytest.mark.parametrize("base_weakref_slot", [True, False])
+ @pytest.mark.parametrize("sub_weakref_slot", [True, False])
+ @pytest.mark.parametrize("base_converter", [True, False])
+ @pytest.mark.parametrize("sub_converter", [True, False])
+ def test_frozen_slots_combo(
+ self,
+ base_slots,
+ sub_slots,
+ base_frozen,
+ sub_frozen,
+ base_weakref_slot,
+ sub_weakref_slot,
+ base_converter,
+ sub_converter,
+ ):
+ """
+ A class with a single attribute, inheriting from another class
+ with a single attribute.
+ """
+
+ @attr.s(
+ frozen=base_frozen,
+ slots=base_slots,
+ weakref_slot=base_weakref_slot,
+ )
+ class Base(object):
+ a = attr.ib(converter=int if base_converter else None)
+
+ @attr.s(
+ frozen=sub_frozen, slots=sub_slots, weakref_slot=sub_weakref_slot
+ )
+ class Sub(Base):
+ b = attr.ib(converter=int if sub_converter else None)
+
+ i = Sub("1", "2")
+
+ assert i.a == (1 if base_converter else "1")
+ assert i.b == (2 if sub_converter else "2")
+
+ if base_frozen or sub_frozen:
+ with pytest.raises(FrozenInstanceError):
+ i.a = "2"
+
+ with pytest.raises(FrozenInstanceError):
+ i.b = "3"
+
+ def test_tuple_class_aliasing(self):
+ """
+ itemgetter and property are legal attribute names.
+ """
+
+ @attr.s
+ class C(object):
+ property = attr.ib()
+ itemgetter = attr.ib()
+ x = attr.ib()
+
+ assert "property" == attr.fields(C).property.name
+ assert "itemgetter" == attr.fields(C).itemgetter.name
+ assert "x" == attr.fields(C).x.name
+
+ @pytest.mark.parametrize("slots", [True, False])
+ @pytest.mark.parametrize("frozen", [True, False])
+ def test_auto_exc(self, slots, frozen):
+ """
+ Classes with auto_exc=True have an Exception-style __str__, compare and
+ hash by id, and store the fields additionally in self.args.
+ """
+
+ @attr.s(auto_exc=True, slots=slots, frozen=frozen)
+ class FooError(Exception):
+ x = attr.ib()
+ y = attr.ib(init=False, default=42)
+ z = attr.ib(init=False)
+ a = attr.ib()
+
+ FooErrorMade = attr.make_class(
+ "FooErrorMade",
+ bases=(Exception,),
+ attrs={
+ "x": attr.ib(),
+ "y": attr.ib(init=False, default=42),
+ "z": attr.ib(init=False),
+ "a": attr.ib(),
+ },
+ auto_exc=True,
+ slots=slots,
+ frozen=frozen,
+ )
+
+ assert FooError(1, "foo") != FooError(1, "foo")
+ assert FooErrorMade(1, "foo") != FooErrorMade(1, "foo")
+
+ for cls in (FooError, FooErrorMade):
+ with pytest.raises(cls) as ei1:
+ raise cls(1, "foo")
+
+ with pytest.raises(cls) as ei2:
+ raise cls(1, "foo")
+
+ e1 = ei1.value
+ e2 = ei2.value
+
+ assert e1 is e1
+ assert e1 == e1
+ assert e2 == e2
+ assert e1 != e2
+ assert "(1, 'foo')" == str(e1) == str(e2)
+ assert (1, "foo") == e1.args == e2.args
+
+ assert hash(e1) == hash(e1)
+ assert hash(e2) == hash(e2)
+
+ if not frozen:
+ deepcopy(e1)
+ deepcopy(e2)
+
+ @pytest.mark.parametrize("slots", [True, False])
+ @pytest.mark.parametrize("frozen", [True, False])
+ def test_auto_exc_one_attrib(self, slots, frozen):
+ """
+ Having one attribute works with auto_exc=True.
+
+ Easy to get wrong with tuple literals.
+ """
+
+ @attr.s(auto_exc=True, slots=slots, frozen=frozen)
+ class FooError(Exception):
+ x = attr.ib()
+
+ FooError(1)
+
+ @pytest.mark.parametrize("slots", [True, False])
+ @pytest.mark.parametrize("frozen", [True, False])
+ def test_eq_only(self, slots, frozen):
+ """
+ Classes with order=False cannot be ordered.
+
+ Python 3 raises a TypeError; on Python 2 we have to check for the
+ absence of the ordering methods.
+ """
+
+ @attr.s(eq=True, order=False, slots=slots, frozen=frozen)
+ class C(object):
+ x = attr.ib()
+
+ if not PY2:
+ possible_errors = (
+ "unorderable types: C() < C()",
+ "'<' not supported between instances of 'C' and 'C'",
+ "unorderable types: C < C", # old PyPy 3
+ )
+
+ with pytest.raises(TypeError) as ei:
+ C(5) < C(6)
+
+ assert ei.value.args[0] in possible_errors
+ else:
+ i = C(42)
+ for m in ("lt", "le", "gt", "ge"):
+ assert None is getattr(i, "__%s__" % (m,), None)
+
+ @given(cmp=optional_bool, eq=optional_bool, order=optional_bool)
+ def test_cmp_deprecated_attribute(self, cmp, eq, order):
+ """
+ Accessing Attribute.cmp raises a deprecation warning but returns True
+ if cmp is True, or eq and order are *both* effectively True.
+ """
+ # These cases are invalid and raise a ValueError.
+ assume(cmp is None or (eq is None and order is None))
+ assume(not (eq is False and order is True))
+
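+ # Work out the value Attribute.cmp is expected to report for this
+ # combination of cmp/eq/order.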
+ if cmp is not None:
+ rv = cmp
+ elif eq is True or eq is None:
+ rv = order is None or order is True
+ elif cmp is None and eq is None and order is None:
+ rv = True
+ elif cmp is None or eq is None:
+ rv = False
+ else:
+ pytest.fail(
+ "Unexpected state: cmp=%r eq=%r order=%r" % (cmp, eq, order)
+ )
+
+ with pytest.deprecated_call() as dc:
+
+ @attr.s
+ class C(object):
+ x = attr.ib(cmp=cmp, eq=eq, order=order)
+
+ assert rv == attr.fields(C).x.cmp
+
+ (w,) = dc.list
+
+ assert (
+ "The usage of `cmp` is deprecated and will be removed on or after "
+ "2021-06-01. Please use `eq` and `order` instead."
+ == w.message.args[0]
+ )
+
+ @pytest.mark.parametrize("slots", [True, False])
+ def test_no_setattr_if_validate_without_validators(self, slots):
+ """
+ If a class has on_setattr=attr.setters.validate (former default in NG
+ APIs) but sets no validators, don't use the (slower) setattr in
+ __init__.
+
+ Regression test for #816.
+ """
+
+ @attr.s(on_setattr=attr.setters.validate)
+ class C(object):
+ x = attr.ib()
+
+ @attr.s(on_setattr=attr.setters.validate)
+ class D(C):
+ y = attr.ib()
+
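+ # Inspect the generated __init__ source: plain assignment should be
+ # used instead of the setattr-based code path.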
+ src = inspect.getsource(D.__init__)
+
+ assert "setattr" not in src
+ assert "self.x = x" in src
+ assert "self.y = y" in src
+ assert object.__setattr__ == D.__setattr__
+
+ @pytest.mark.parametrize("slots", [True, False])
+ def test_no_setattr_if_convert_without_converters(self, slots):
+ """
+ If a class has on_setattr=attr.setters.convert but sets no converters,
+ don't use the (slower) setattr in __init__.
+ """
+
+ @attr.s(on_setattr=attr.setters.convert)
+ class C(object):
+ x = attr.ib()
+
+ @attr.s(on_setattr=attr.setters.convert)
+ class D(C):
+ y = attr.ib()
+
+ src = inspect.getsource(D.__init__)
+
+ assert "setattr" not in src
+ assert "self.x = x" in src
+ assert "self.y = y" in src
+ assert object.__setattr__ == D.__setattr__
+
+ @pytest.mark.skipif(not PY36, reason="NG APIs are 3.6+")
+ @pytest.mark.parametrize("slots", [True, False])
+ def test_no_setattr_with_ng_defaults(self, slots):
+ """
+ If a class has the NG default on_setattr=[convert, validate] but sets
+ no validators or converters, don't use the (slower) setattr in
+ __init__.
+ """
+
+ @attr.define
+ class C(object):
+ x = attr.ib()
+
+ src = inspect.getsource(C.__init__)
+
+ assert "setattr" not in src
+ assert "self.x = x" in src
+ assert object.__setattr__ == C.__setattr__
+
+ @attr.define
+ class D(C):
+ y = attr.ib()
+
+ src = inspect.getsource(D.__init__)
+
+ assert "setattr" not in src
+ assert "self.x = x" in src
+ assert "self.y = y" in src
+ assert object.__setattr__ == D.__setattr__
+
+ def test_on_setattr_detect_inherited_validators(self):
+ """
+ _make_init detects the presence of a validator even if the field is
+ inherited.
+ """
+
+ @attr.s(on_setattr=attr.setters.validate)
+ class C(object):
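+ # The validator only has to be truthy here; the class is never
+ # instantiated in this test, so the validator is never called.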
+ x = attr.ib(validator=42)
+
+ @attr.s(on_setattr=attr.setters.validate)
+ class D(C):
+ y = attr.ib()
+
+ src = inspect.getsource(D.__init__)
+
+ assert "_setattr = _cached_setattr" in src
+ assert "_setattr('x', x)" in src
+ assert "_setattr('y', y)" in src
+ assert object.__setattr__ != D.__setattr__
diff --git a/testing/web-platform/tests/tools/third_party/attrs/tests/test_hooks.py b/testing/web-platform/tests/tools/third_party/attrs/tests/test_hooks.py
new file mode 100644
index 0000000000..92fc2dcaab
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/tests/test_hooks.py
@@ -0,0 +1,209 @@
+# SPDX-License-Identifier: MIT
+
+from datetime import datetime
+from typing import Dict, List
+
+import attr
+
+
+class TestTransformHook:
+ """
+ Tests for `attr.s(field_transformer=func)`.
+ """
+
+ def test_hook_applied(self):
+ """
+ The transform hook is applied to all attributes. Types can be missing,
+ explicitly set, or annotated.
+ """
+ results = []
+
+ def hook(cls, attribs):
+ attr.resolve_types(cls, attribs=attribs)
+ results[:] = [(a.name, a.type) for a in attribs]
+ return attribs
+
+ @attr.s(field_transformer=hook)
+ class C:
+ x = attr.ib()
+ y = attr.ib(type=int)
+ z: float = attr.ib()
+
+ assert results == [("x", None), ("y", int), ("z", float)]
+
+ def test_hook_applied_auto_attrib(self):
+ """
+ The transform hook is applied to all attributes and type annotations
+ are detected.
+ """
+ results = []
+
+ def hook(cls, attribs):
+ attr.resolve_types(cls, attribs=attribs)
+ results[:] = [(a.name, a.type) for a in attribs]
+ return attribs
+
+ @attr.s(auto_attribs=True, field_transformer=hook)
+ class C:
+ x: int
+ y: str = attr.ib()
+
+ assert results == [("x", int), ("y", str)]
+
+ def test_hook_applied_modify_attrib(self):
+ """
+ The transform hook can modify attributes.
+ """
+
+ def hook(cls, attribs):
+ attr.resolve_types(cls, attribs=attribs)
+ return [a.evolve(converter=a.type) for a in attribs]
+
+ @attr.s(auto_attribs=True, field_transformer=hook)
+ class C:
+ x: int = attr.ib(converter=int)
+ y: float
+
+ c = C(x="3", y="3.14")
+ assert c == C(x=3, y=3.14)
+
+ def test_hook_remove_field(self):
+ """
+ It is possible to remove fields via the hook.
+ """
+
+ def hook(cls, attribs):
+ attr.resolve_types(cls, attribs=attribs)
+ return [a for a in attribs if a.type is not int]
+
+ @attr.s(auto_attribs=True, field_transformer=hook)
+ class C:
+ x: int
+ y: float
+
+ assert attr.asdict(C(2.7)) == {"y": 2.7}
+
+ def test_hook_add_field(self):
+ """
+ It is possible to add fields via the hook.
+ """
+
+ def hook(cls, attribs):
+ a1 = attribs[0]
+ a2 = a1.evolve(name="new")
+ return [a1, a2]
+
+ @attr.s(auto_attribs=True, field_transformer=hook)
+ class C:
+ x: int
+
+ assert attr.asdict(C(1, 2)) == {"x": 1, "new": 2}
+
+ def test_hook_with_inheritance(self):
+ """
+ The hook receives all fields from base classes.
+ """
+
+ def hook(cls, attribs):
+ assert [a.name for a in attribs] == ["x", "y"]
+ # Remove Base's "x"
+ return attribs[1:]
+
+ @attr.s(auto_attribs=True)
+ class Base:
+ x: int
+
+ @attr.s(auto_attribs=True, field_transformer=hook)
+ class Sub(Base):
+ y: int
+
+ assert attr.asdict(Sub(2)) == {"y": 2}
+
+ def test_attrs_attrclass(self):
+ """
+ The list of attrs returned by a field_transformer is converted to
+ "AttrsClass" again.
+
+ Regression test for #821.
+ """
+
+ @attr.s(auto_attribs=True, field_transformer=lambda c, a: list(a))
+ class C:
+ x: int
+
+ fields_type = type(attr.fields(C))
+ assert fields_type.__name__ == "CAttributes"
+ assert issubclass(fields_type, tuple)
+
+
+class TestAsDictHook:
+ def test_asdict(self):
+ """
+ asdict() calls the hooks in attrs classes and in other data structures
+ like lists or dicts.
+ """
+
+ def hook(inst, a, v):
+ if isinstance(v, datetime):
+ return v.isoformat()
+ return v
+
+ @attr.dataclass
+ class Child:
+ x: datetime
+ y: List[datetime]
+
+ @attr.dataclass
+ class Parent:
+ a: Child
+ b: List[Child]
+ c: Dict[str, Child]
+ d: Dict[str, datetime]
+
+ inst = Parent(
+ a=Child(1, [datetime(2020, 7, 1)]),
+ b=[Child(2, [datetime(2020, 7, 2)])],
+ c={"spam": Child(3, [datetime(2020, 7, 3)])},
+ d={"eggs": datetime(2020, 7, 4)},
+ )
+
+ result = attr.asdict(inst, value_serializer=hook)
+ assert result == {
+ "a": {"x": 1, "y": ["2020-07-01T00:00:00"]},
+ "b": [{"x": 2, "y": ["2020-07-02T00:00:00"]}],
+ "c": {"spam": {"x": 3, "y": ["2020-07-03T00:00:00"]}},
+ "d": {"eggs": "2020-07-04T00:00:00"},
+ }
+
+ def test_asdict_calls(self):
+ """
+ The correct instances and attribute names are passed to the hook.
+ """
+ calls = []
+
+ def hook(inst, a, v):
+ calls.append((inst, a.name if a else a, v))
+ return v
+
+ @attr.dataclass
+ class Child:
+ x: int
+
+ @attr.dataclass
+ class Parent:
+ a: Child
+ b: List[Child]
+ c: Dict[str, Child]
+
+ inst = Parent(a=Child(1), b=[Child(2)], c={"spam": Child(3)})
+
+ attr.asdict(inst, value_serializer=hook)
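+ # Note that dict keys (here "spam") are serialized too, with both the
+ # instance and the attribute reported as None.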
+ assert calls == [
+ (inst, "a", inst.a),
+ (inst.a, "x", inst.a.x),
+ (inst, "b", inst.b),
+ (inst.b[0], "x", inst.b[0].x),
+ (inst, "c", inst.c),
+ (None, None, "spam"),
+ (inst.c["spam"], "x", inst.c["spam"].x),
+ ]
diff --git a/testing/web-platform/tests/tools/third_party/attrs/tests/test_import.py b/testing/web-platform/tests/tools/third_party/attrs/tests/test_import.py
new file mode 100644
index 0000000000..423124319c
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/tests/test_import.py
@@ -0,0 +1,11 @@
+# SPDX-License-Identifier: MIT
+
+
+class TestImportStar(object):
+ def test_from_attr_import_star(self):
+ """
+ `from attr import *` works.
+ """
+ # attr_import_star contains `from attr import *`, which cannot
+ # be done here because *-imports are only allowed on module level.
+ from . import attr_import_star # noqa: F401
diff --git a/testing/web-platform/tests/tools/third_party/attrs/tests/test_init_subclass.py b/testing/web-platform/tests/tools/third_party/attrs/tests/test_init_subclass.py
new file mode 100644
index 0000000000..863e794377
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/tests/test_init_subclass.py
@@ -0,0 +1,48 @@
+# SPDX-License-Identifier: MIT
+
+"""
+ Tests for `__init_subclass__`-related functionality.
+
+Python 3.6+ only.
+"""
+
+import pytest
+
+import attr
+
+
+@pytest.mark.parametrize("slots", [True, False])
+def test_init_subclass_vanilla(slots):
+ """
+ `super().__init_subclass__` can be used if the subclass is not an attrs
+ class, both with dict and slotted classes.
+ """
+
+ @attr.s(slots=slots)
+ class Base:
+ def __init_subclass__(cls, param, **kw):
+ super().__init_subclass__(**kw)
+ cls.param = param
+
+ class Vanilla(Base, param="foo"):
+ pass
+
+ assert "foo" == Vanilla().param
+
+
+def test_init_subclass_attrs():
+ """
+ `__init_subclass__` works with attrs classes as long as slots=False.
+ """
+
+ @attr.s(slots=False)
+ class Base:
+ def __init_subclass__(cls, param, **kw):
+ super().__init_subclass__(**kw)
+ cls.param = param
+
+ @attr.s
+ class Attrs(Base, param="foo"):
+ pass
+
+ assert "foo" == Attrs().param
diff --git a/testing/web-platform/tests/tools/third_party/attrs/tests/test_make.py b/testing/web-platform/tests/tools/third_party/attrs/tests/test_make.py
new file mode 100644
index 0000000000..729d3a71f0
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/tests/test_make.py
@@ -0,0 +1,2462 @@
+# SPDX-License-Identifier: MIT
+
+"""
+Tests for `attr._make`.
+"""
+
+from __future__ import absolute_import, division, print_function
+
+import copy
+import functools
+import gc
+import inspect
+import itertools
+import sys
+
+from operator import attrgetter
+
+import pytest
+
+from hypothesis import assume, given
+from hypothesis.strategies import booleans, integers, lists, sampled_from, text
+
+import attr
+
+from attr import _config
+from attr._compat import PY2, PY310, ordered_dict
+from attr._make import (
+ Attribute,
+ Factory,
+ _AndValidator,
+ _Attributes,
+ _ClassBuilder,
+ _CountingAttr,
+ _determine_attrib_eq_order,
+ _determine_attrs_eq_order,
+ _determine_whether_to_implement,
+ _transform_attrs,
+ and_,
+ fields,
+ fields_dict,
+ make_class,
+ validate,
+)
+from attr.exceptions import (
+ DefaultAlreadySetError,
+ NotAnAttrsClassError,
+ PythonTooOldError,
+)
+
+from .strategies import (
+ gen_attr_names,
+ list_of_attrs,
+ optional_bool,
+ simple_attrs,
+ simple_attrs_with_metadata,
+ simple_attrs_without_metadata,
+ simple_classes,
+)
+from .utils import simple_attr
+
+
+attrs_st = simple_attrs.map(lambda c: Attribute.from_counting_attr("name", c))
+
+
+class TestCountingAttr(object):
+ """
+ Tests for `attr`.
+ """
+
+ def test_returns_Attr(self):
+ """
+ Returns an instance of _CountingAttr.
+ """
+ a = attr.ib()
+
+ assert isinstance(a, _CountingAttr)
+
+ def test_validators_lists_to_wrapped_tuples(self):
+ """
+ If a list is passed as a validator, it is converted to a tuple and
+ wrapped in an `and_` validator.
+ """
+
+ def v1(_, __):
+ pass
+
+ def v2(_, __):
+ pass
+
+ a = attr.ib(validator=[v1, v2])
+
+ assert _AndValidator((v1, v2)) == a._validator
+
+ def test_validator_decorator_single(self):
+ """
+ If _CountingAttr.validator is used as a decorator and there is no
+ validator set, the decorated method is used as the validator.
+ """
+ a = attr.ib()
+
+ @a.validator
+ def v():
+ pass
+
+ assert v == a._validator
+
+ @pytest.mark.parametrize(
+ "wrap", [lambda v: v, lambda v: [v], lambda v: and_(v)]
+ )
+ def test_validator_decorator(self, wrap):
+ """
+ If _CountingAttr.validator is used as a decorator and there is already
+ a validator set, the validators are composed using `and_`.
+ """
+
+ def v(_, __):
+ pass
+
+ a = attr.ib(validator=wrap(v))
+
+ @a.validator
+ def v2(self, _, __):
+ pass
+
+ assert _AndValidator((v, v2)) == a._validator
+
+ def test_default_decorator_already_set(self):
+ """
+ Raise DefaultAlreadySetError if the decorator is used after a default
+ has been set.
+ """
+ a = attr.ib(default=42)
+
+ with pytest.raises(DefaultAlreadySetError):
+
+ @a.default
+ def f(self):
+ pass
+
+ def test_default_decorator_sets(self):
+ """
+ Decorator wraps the method in a Factory with pass_self=True and sets
+ the default.
+ """
+ a = attr.ib()
+
+ @a.default
+ def f(self):
+ pass
+
+ assert Factory(f, True) == a._default
+
+
+def make_tc():
+ class TransformC(object):
+ z = attr.ib()
+ y = attr.ib()
+ x = attr.ib()
+ a = 42
+
+ return TransformC
+
+
+class TestTransformAttrs(object):
+ """
+ Tests for `_transform_attrs`.
+ """
+
+ def test_no_modifications(self):
+ """
+ Does not attach __attrs_attrs__ to the class.
+ """
+ C = make_tc()
+ _transform_attrs(C, None, False, False, True, None)
+
+ assert None is getattr(C, "__attrs_attrs__", None)
+
+ def test_normal(self):
+ """
+ Transforms every `_CountingAttr` and leaves other class attributes
+ (like `a`) alone.
+ """
+ C = make_tc()
+ attrs, _, _ = _transform_attrs(C, None, False, False, True, None)
+
+ assert ["z", "y", "x"] == [a.name for a in attrs]
+
+ def test_empty(self):
+ """
+ No attributes works as expected.
+ """
+
+ @attr.s
+ class C(object):
+ pass
+
+ assert _Attributes(((), [], {})) == _transform_attrs(
+ C, None, False, False, True, None
+ )
+
+ def test_transforms_to_attribute(self):
+ """
+ All `_CountingAttr`s are transformed into `Attribute`s.
+ """
+ C = make_tc()
+ attrs, base_attrs, _ = _transform_attrs(
+ C, None, False, False, True, None
+ )
+
+ assert [] == base_attrs
+ assert 3 == len(attrs)
+ assert all(isinstance(a, Attribute) for a in attrs)
+
+ def test_conflicting_defaults(self):
+ """
+ Raises `ValueError` if attributes with defaults are followed by
+ mandatory attributes.
+ """
+
+ class C(object):
+ x = attr.ib(default=None)
+ y = attr.ib()
+
+ with pytest.raises(ValueError) as e:
+ _transform_attrs(C, None, False, False, True, None)
+ assert (
+ "No mandatory attributes allowed after an attribute with a "
+ "default value or factory. Attribute in question: Attribute"
+ "(name='y', default=NOTHING, validator=None, repr=True, "
+ "eq=True, eq_key=None, order=True, order_key=None, "
+ "hash=None, init=True, "
+ "metadata=mappingproxy({}), type=None, converter=None, "
+ "kw_only=False, inherited=False, on_setattr=None)",
+ ) == e.value.args
+
+ def test_kw_only(self):
+ """
+ Converts all attributes, including base class' attributes, if `kw_only`
+ is provided. Therefore, `kw_only` allows attributes with defaults to
+ precede mandatory attributes.
+
+ Updates in the subclass *don't* affect the base class attributes.
+ """
+
+ @attr.s
+ class B(object):
+ b = attr.ib()
+
+ for b_a in B.__attrs_attrs__:
+ assert b_a.kw_only is False
+
+ class C(B):
+ x = attr.ib(default=None)
+ y = attr.ib()
+
+ attrs, base_attrs, _ = _transform_attrs(
+ C, None, False, True, True, None
+ )
+
+ assert len(attrs) == 3
+ assert len(base_attrs) == 1
+
+ for a in attrs:
+ assert a.kw_only is True
+
+ for b_a in B.__attrs_attrs__:
+ assert b_a.kw_only is False
+
+ def test_these(self):
+ """
+ If these is passed, use it and ignore body and base classes.
+ """
+
+ class Base(object):
+ z = attr.ib()
+
+ class C(Base):
+ y = attr.ib()
+
+ attrs, base_attrs, _ = _transform_attrs(
+ C, {"x": attr.ib()}, False, False, True, None
+ )
+
+ assert [] == base_attrs
+ assert (simple_attr("x"),) == attrs
+
+ def test_these_leave_body(self):
+ """
+ If these is passed, no attributes are removed from the body.
+ """
+
+ @attr.s(init=False, these={"x": attr.ib()})
+ class C(object):
+ x = 5
+
+ assert 5 == C().x
+ assert "C(x=5)" == repr(C())
+
+ def test_these_ordered(self):
+ """
+ If these is passed ordered attrs, their order is respected instead of
+ the counter.
+ """
+ b = attr.ib(default=2)
+ a = attr.ib(default=1)
+
+ @attr.s(these=ordered_dict([("a", a), ("b", b)]))
+ class C(object):
+ pass
+
+ assert "C(a=1, b=2)" == repr(C())
+
+ def test_multiple_inheritance_old(self):
+ """
+ Old multiple inheritance attribute collection behavior is retained.
+
+ See #285
+ """
+
+ @attr.s
+ class A(object):
+ a1 = attr.ib(default="a1")
+ a2 = attr.ib(default="a2")
+
+ @attr.s
+ class B(A):
+ b1 = attr.ib(default="b1")
+ b2 = attr.ib(default="b2")
+
+ @attr.s
+ class C(B, A):
+ c1 = attr.ib(default="c1")
+ c2 = attr.ib(default="c2")
+
+ @attr.s
+ class D(A):
+ d1 = attr.ib(default="d1")
+ d2 = attr.ib(default="d2")
+
+ @attr.s
+ class E(C, D):
+ e1 = attr.ib(default="e1")
+ e2 = attr.ib(default="e2")
+
+ assert (
+ "E(a1='a1', a2='a2', b1='b1', b2='b2', c1='c1', c2='c2', d1='d1', "
+ "d2='d2', e1='e1', e2='e2')"
+ ) == repr(E())
+
+ def test_overwrite_proper_mro(self):
+ """
+ The proper MRO path works for single overwrites, too.
+ """
+
+ @attr.s(collect_by_mro=True)
+ class C(object):
+ x = attr.ib(default=1)
+
+ @attr.s(collect_by_mro=True)
+ class D(C):
+ x = attr.ib(default=2)
+
+ assert "D(x=2)" == repr(D())
+
+ def test_multiple_inheritance_proper_mro(self):
+ """
+ Attributes are collected according to the MRO.
+
+ See #428
+ """
+
+ @attr.s
+ class A(object):
+ a1 = attr.ib(default="a1")
+ a2 = attr.ib(default="a2")
+
+ @attr.s
+ class B(A):
+ b1 = attr.ib(default="b1")
+ b2 = attr.ib(default="b2")
+
+ @attr.s
+ class C(B, A):
+ c1 = attr.ib(default="c1")
+ c2 = attr.ib(default="c2")
+
+ @attr.s
+ class D(A):
+ d1 = attr.ib(default="d1")
+ d2 = attr.ib(default="d2")
+
+ @attr.s(collect_by_mro=True)
+ class E(C, D):
+ e1 = attr.ib(default="e1")
+ e2 = attr.ib(default="e2")
+
+ assert (
+ "E(a1='a1', a2='a2', d1='d1', d2='d2', b1='b1', b2='b2', c1='c1', "
+ "c2='c2', e1='e1', e2='e2')"
+ ) == repr(E())
+
+ def test_mro(self):
+ """
+ Attributes and methods are looked up the same way.
+
+ See #428
+ """
+
+ @attr.s(collect_by_mro=True)
+ class A(object):
+
+ x = attr.ib(10)
+
+ def xx(self):
+ return 10
+
+ @attr.s(collect_by_mro=True)
+ class B(A):
+ y = attr.ib(20)
+
+ @attr.s(collect_by_mro=True)
+ class C(A):
+ x = attr.ib(50)
+
+ def xx(self):
+ return 50
+
+ @attr.s(collect_by_mro=True)
+ class D(B, C):
+ pass
+
+ d = D()
+
+ assert d.x == d.xx()
+
+ def test_inherited(self):
+ """
+ Inherited Attributes have `.inherited` True, otherwise False.
+ """
+
+ @attr.s
+ class A(object):
+ a = attr.ib()
+
+ @attr.s
+ class B(A):
+ b = attr.ib()
+
+ @attr.s
+ class C(B):
+ a = attr.ib()
+ c = attr.ib()
+
+ f = attr.fields
+
+ assert False is f(A).a.inherited
+
+ assert True is f(B).a.inherited
+ assert False is f(B).b.inherited
+
+ assert False is f(C).a.inherited
+ assert True is f(C).b.inherited
+ assert False is f(C).c.inherited
+
+
+class TestAttributes(object):
+ """
+ Tests for the `attrs`/`attr.s` class decorator.
+ """
+
+ @pytest.mark.skipif(not PY2, reason="No old-style classes in Py3")
+ def test_catches_old_style(self):
+ """
+ Raises TypeError on old-style classes.
+ """
+ with pytest.raises(TypeError) as e:
+
+ @attr.s
+ class C:
+ pass
+
+ assert ("attrs only works with new-style classes.",) == e.value.args
+
+ def test_sets_attrs(self):
+ """
+ Sets the `__attrs_attrs__` class attribute with a list of `Attribute`s.
+ """
+
+ @attr.s
+ class C(object):
+ x = attr.ib()
+
+ assert "x" == C.__attrs_attrs__[0].name
+ assert all(isinstance(a, Attribute) for a in C.__attrs_attrs__)
+
+ def test_empty(self):
+ """
+ No attributes, no problems.
+ """
+
+ @attr.s
+ class C3(object):
+ pass
+
+ assert "C3()" == repr(C3())
+ assert C3() == C3()
+
+ @given(attr=attrs_st, attr_name=sampled_from(Attribute.__slots__))
+ def test_immutable(self, attr, attr_name):
+ """
+ Attribute instances are immutable.
+ """
+ with pytest.raises(AttributeError):
+ setattr(attr, attr_name, 1)
+
+ @pytest.mark.parametrize(
+ "method_name", ["__repr__", "__eq__", "__hash__", "__init__"]
+ )
+ def test_adds_all_by_default(self, method_name):
+ """
+ If no further arguments are supplied, all add_XXX functions except
+ add_hash are applied. __hash__ is set to None.
+ """
+ # Set the method name to a sentinel and check whether it has been
+ # overwritten afterwards.
+ sentinel = object()
+
+ class C(object):
+ x = attr.ib()
+
+ setattr(C, method_name, sentinel)
+
+ C = attr.s(C)
+ meth = getattr(C, method_name)
+
+ assert sentinel != meth
+ if method_name == "__hash__":
+ assert meth is None
+
+ @pytest.mark.parametrize(
+ "arg_name, method_name",
+ [
+ ("repr", "__repr__"),
+ ("eq", "__eq__"),
+ ("order", "__le__"),
+ ("hash", "__hash__"),
+ ("init", "__init__"),
+ ],
+ )
+ def test_respects_add_arguments(self, arg_name, method_name):
+ """
+ If a certain `XXX` is `False`, `__XXX__` is not added to the class.
+ """
+ # Set the method name to a sentinel and check whether it has been
+ # overwritten afterwards.
+ sentinel = object()
+
+ am_args = {
+ "repr": True,
+ "eq": True,
+ "order": True,
+ "hash": True,
+ "init": True,
+ }
+ am_args[arg_name] = False
+ if arg_name == "eq":
+ am_args["order"] = False
+
+ class C(object):
+ x = attr.ib()
+
+ setattr(C, method_name, sentinel)
+
+ C = attr.s(**am_args)(C)
+
+ assert sentinel == getattr(C, method_name)
+
+ @pytest.mark.parametrize("init", [True, False])
+ def test_respects_init_attrs_init(self, init):
+ """
+ If init=False, adds __attrs_init__ to the class.
+ Otherwise, it does not.
+ """
+
+ class C(object):
+ x = attr.ib()
+
+ C = attr.s(init=init)(C)
+ assert hasattr(C, "__attrs_init__") != init
+
+ @pytest.mark.skipif(PY2, reason="__qualname__ is PY3-only.")
+ @given(slots_outer=booleans(), slots_inner=booleans())
+ def test_repr_qualname(self, slots_outer, slots_inner):
+ """
+ On Python 3, the name in repr is the __qualname__.
+ """
+
+ @attr.s(slots=slots_outer)
+ class C(object):
+ @attr.s(slots=slots_inner)
+ class D(object):
+ pass
+
+ assert "C.D()" == repr(C.D())
+ assert "GC.D()" == repr(GC.D())
+
+ @given(slots_outer=booleans(), slots_inner=booleans())
+ def test_repr_fake_qualname(self, slots_outer, slots_inner):
+ """
+ Setting repr_ns overrides a potentially guessed namespace.
+ """
+
+ @attr.s(slots=slots_outer)
+ class C(object):
+ @attr.s(repr_ns="C", slots=slots_inner)
+ class D(object):
+ pass
+
+ assert "C.D()" == repr(C.D())
+
+ @pytest.mark.skipif(PY2, reason="__qualname__ is PY3-only.")
+ @given(slots_outer=booleans(), slots_inner=booleans())
+ def test_name_not_overridden(self, slots_outer, slots_inner):
+ """
+ On Python 3, __name__ is different from __qualname__.
+ """
+
+ @attr.s(slots=slots_outer)
+ class C(object):
+ @attr.s(slots=slots_inner)
+ class D(object):
+ pass
+
+ assert C.D.__name__ == "D"
+ assert C.D.__qualname__ == C.__qualname__ + ".D"
+
+ @pytest.mark.parametrize("with_validation", [True, False])
+ def test_pre_init(self, with_validation, monkeypatch):
+ """
+ Verify that __attrs_pre_init__ gets called if defined.
+ """
+ monkeypatch.setattr(_config, "_run_validators", with_validation)
+
+ @attr.s
+ class C(object):
+ def __attrs_pre_init__(self2):
+ self2.z = 30
+
+ c = C()
+
+ assert 30 == getattr(c, "z", None)
+
+ @pytest.mark.parametrize("with_validation", [True, False])
+ def test_post_init(self, with_validation, monkeypatch):
+ """
+ Verify that __attrs_post_init__ gets called if defined.
+ """
+ monkeypatch.setattr(_config, "_run_validators", with_validation)
+
+ @attr.s
+ class C(object):
+ x = attr.ib()
+ y = attr.ib()
+
+ def __attrs_post_init__(self2):
+ self2.z = self2.x + self2.y
+
+ c = C(x=10, y=20)
+
+ assert 30 == getattr(c, "z", None)
+
+ @pytest.mark.parametrize("with_validation", [True, False])
+ def test_pre_post_init_order(self, with_validation, monkeypatch):
+ """
+ Verify that __attrs_pre_init__ and __attrs_post_init__ get called in
+ the expected order if both are defined.
+ """
+ monkeypatch.setattr(_config, "_run_validators", with_validation)
+
+ @attr.s
+ class C(object):
+ x = attr.ib()
+
+ def __attrs_pre_init__(self2):
+ self2.z = 30
+
+ def __attrs_post_init__(self2):
+ self2.z += self2.x
+
+ c = C(x=10)
+
+ assert 40 == getattr(c, "z", None)
+
+ def test_types(self):
+ """
+ Sets the `Attribute.type` attr from type argument.
+ """
+
+ @attr.s
+ class C(object):
+ x = attr.ib(type=int)
+ y = attr.ib(type=str)
+ z = attr.ib()
+
+ assert int is fields(C).x.type
+ assert str is fields(C).y.type
+ assert None is fields(C).z.type
+
+ @pytest.mark.parametrize("slots", [True, False])
+ def test_clean_class(self, slots):
+ """
+ Attribute definitions do not appear on the class body after @attr.s.
+ """
+
+ @attr.s(slots=slots)
+ class C(object):
+ x = attr.ib()
+
+ x = getattr(C, "x", None)
+
+ assert not isinstance(x, _CountingAttr)
+
+ def test_factory_sugar(self):
+ """
+ Passing factory=f is syntactic sugar for passing default=Factory(f).
+ """
+
+ @attr.s
+ class C(object):
+ x = attr.ib(factory=list)
+
+ assert Factory(list) == attr.fields(C).x.default
+
+ def test_sugar_factory_mutex(self):
+ """
+ Passing both default and factory raises ValueError.
+ """
+ with pytest.raises(ValueError, match="mutually exclusive"):
+
+ @attr.s
+ class C(object):
+ x = attr.ib(factory=list, default=Factory(list))
+
+ def test_sugar_callable(self):
+ """
+ Factory has to be a callable to prevent people from passing Factory
+ into it.
+ """
+ with pytest.raises(ValueError, match="must be a callable"):
+
+ @attr.s
+ class C(object):
+ x = attr.ib(factory=Factory(list))
+
+ def test_inherited_does_not_affect_hashing_and_equality(self):
+ """
+ Whether or not an Attribute has been inherited doesn't affect how it's
+ hashed and compared.
+ """
+
+ @attr.s
+ class BaseClass(object):
+ x = attr.ib()
+
+ @attr.s
+ class SubClass(BaseClass):
+ pass
+
+ ba = attr.fields(BaseClass)[0]
+ sa = attr.fields(SubClass)[0]
+
+ assert ba == sa
+ assert hash(ba) == hash(sa)
+
+
+class TestKeywordOnlyAttributes(object):
+ """
+ Tests for keyword-only attributes.
+ """
+
+ def test_adds_keyword_only_arguments(self):
+ """
+ Attributes can be added as keyword-only.
+ """
+
+ @attr.s
+ class C(object):
+ a = attr.ib()
+ b = attr.ib(default=2, kw_only=True)
+ c = attr.ib(kw_only=True)
+ d = attr.ib(default=attr.Factory(lambda: 4), kw_only=True)
+
+ c = C(1, c=3)
+
+ assert c.a == 1
+ assert c.b == 2
+ assert c.c == 3
+ assert c.d == 4
+
+ def test_ignores_kw_only_when_init_is_false(self):
+ """
+ Specifying ``kw_only=True`` when ``init=False`` is essentially a no-op.
+ """
+
+ @attr.s
+ class C(object):
+ x = attr.ib(init=False, default=0, kw_only=True)
+ y = attr.ib()
+
+ c = C(1)
+
+ assert c.x == 0
+ assert c.y == 1
+
+ def test_keyword_only_attributes_presence(self):
+ """
+ Raises `TypeError` when keyword-only arguments are
+ not specified.
+ """
+
+ @attr.s
+ class C(object):
+ x = attr.ib(kw_only=True)
+
+ with pytest.raises(TypeError) as e:
+ C()
+
+ if PY2:
+ assert (
+ "missing required keyword-only argument: 'x'"
+ ) in e.value.args[0]
+ else:
+ assert (
+ "missing 1 required keyword-only argument: 'x'"
+ ) in e.value.args[0]
+
+ def test_keyword_only_attributes_unexpected(self):
+ """
+ Raises `TypeError` when unexpected keyword argument passed.
+ """
+
+ @attr.s
+ class C(object):
+ x = attr.ib(kw_only=True)
+
+ with pytest.raises(TypeError) as e:
+ C(x=5, y=10)
+
+ assert "got an unexpected keyword argument 'y'" in e.value.args[0]
+
+ def test_keyword_only_attributes_can_come_in_any_order(self):
+ """
+ Mandatory vs non-mandatory attr order only matters when they are part
+ of the __init__ signature and when they aren't kw_only (which are
+ moved to the end and can be mandatory or non-mandatory in any order,
+ as they will be specified as keyword args anyway).
+ """
+
+ @attr.s
+ class C(object):
+ a = attr.ib(kw_only=True)
+ b = attr.ib(kw_only=True, default="b")
+ c = attr.ib(kw_only=True)
+ d = attr.ib()
+ e = attr.ib(default="e")
+ f = attr.ib(kw_only=True)
+ g = attr.ib(kw_only=True, default="g")
+ h = attr.ib(kw_only=True)
+ i = attr.ib(init=False)
+
+ c = C("d", a="a", c="c", f="f", h="h")
+
+ assert c.a == "a"
+ assert c.b == "b"
+ assert c.c == "c"
+ assert c.d == "d"
+ assert c.e == "e"
+ assert c.f == "f"
+ assert c.g == "g"
+ assert c.h == "h"
+
+ def test_keyword_only_attributes_allow_subclassing(self):
+ """
+ A subclass can define keyword-only attributes without defaults
+ when the base class has attributes with defaults.
+ """
+
+ @attr.s
+ class Base(object):
+ x = attr.ib(default=0)
+
+ @attr.s
+ class C(Base):
+ y = attr.ib(kw_only=True)
+
+ c = C(y=1)
+
+ assert c.x == 0
+ assert c.y == 1
+
+ def test_keyword_only_class_level(self):
+ """
+ `kw_only` can be provided at the attr.s level, converting all
+ attributes to `kw_only`.
+ """
+
+ @attr.s(kw_only=True)
+ class C(object):
+ x = attr.ib()
+ y = attr.ib(kw_only=True)
+
+ with pytest.raises(TypeError):
+ C(0, y=1)
+
+ c = C(x=0, y=1)
+
+ assert c.x == 0
+ assert c.y == 1
+
+ def test_keyword_only_class_level_subclassing(self):
+ """
+ Subclass `kw_only` propagates to attrs inherited from the base,
+ allowing non-default following default.
+ """
+
+ @attr.s
+ class Base(object):
+ x = attr.ib(default=0)
+
+ @attr.s(kw_only=True)
+ class C(Base):
+ y = attr.ib()
+
+ with pytest.raises(TypeError):
+ C(1)
+
+ c = C(x=0, y=1)
+
+ assert c.x == 0
+ assert c.y == 1
+
+ def test_init_false_attribute_after_keyword_attribute(self):
+ """
+ A positional attribute cannot follow a `kw_only` attribute,
+ but an `init=False` attribute can because it won't appear
+ in `__init__`.
+ """
+
+ @attr.s
+ class KwArgBeforeInitFalse(object):
+ kwarg = attr.ib(kw_only=True)
+ non_init_function_default = attr.ib(init=False)
+ non_init_keyword_default = attr.ib(
+ init=False, default="default-by-keyword"
+ )
+
+ @non_init_function_default.default
+ def _init_to_init(self):
+ return self.kwarg + "b"
+
+ c = KwArgBeforeInitFalse(kwarg="a")
+
+ assert c.kwarg == "a"
+ assert c.non_init_function_default == "ab"
+ assert c.non_init_keyword_default == "default-by-keyword"
+
+ def test_init_false_attribute_after_keyword_attribute_with_inheritance(
+ self,
+ ):
+ """
+ A positional attribute cannot follow a `kw_only` attribute,
+ but an `init=False` attribute can because it won't appear
+ in `__init__`. This test checks that we allow this
+ even when the `kw_only` attribute appears in a parent class
+ """
+
+ @attr.s
+ class KwArgBeforeInitFalseParent(object):
+ kwarg = attr.ib(kw_only=True)
+
+ @attr.s
+ class KwArgBeforeInitFalseChild(KwArgBeforeInitFalseParent):
+ non_init_function_default = attr.ib(init=False)
+ non_init_keyword_default = attr.ib(
+ init=False, default="default-by-keyword"
+ )
+
+ @non_init_function_default.default
+ def _init_to_init(self):
+ return self.kwarg + "b"
+
+ c = KwArgBeforeInitFalseChild(kwarg="a")
+
+ assert c.kwarg == "a"
+ assert c.non_init_function_default == "ab"
+ assert c.non_init_keyword_default == "default-by-keyword"
+
+
+@pytest.mark.skipif(not PY2, reason="PY2-specific keyword-only error behavior")
+class TestKeywordOnlyAttributesOnPy2(object):
+ """
+ Tests for keyword-only attribute behavior on py2.
+ """
+
+ def test_no_init(self):
+ """
+ Keyword-only is a no-op, not an error, if ``init=False``.
+ """
+
+ @attr.s(kw_only=True, init=False)
+ class ClassLevel(object):
+ a = attr.ib()
+
+ @attr.s(init=False)
+ class AttrLevel(object):
+ a = attr.ib(kw_only=True)
+
+
+@attr.s
+class GC(object):
+ @attr.s
+ class D(object):
+ pass
+
+
+class TestMakeClass(object):
+ """
+ Tests for `make_class`.
+ """
+
+ @pytest.mark.parametrize("ls", [list, tuple])
+ def test_simple(self, ls):
+ """
+ Passing a list of strings creates attributes with default args.
+ """
+ C1 = make_class("C1", ls(["a", "b"]))
+
+ @attr.s
+ class C2(object):
+ a = attr.ib()
+ b = attr.ib()
+
+ assert C1.__attrs_attrs__ == C2.__attrs_attrs__
+
+ def test_dict(self):
+ """
+ Passing a dict of name: _CountingAttr creates an equivalent class.
+ """
+ C1 = make_class(
+ "C1", {"a": attr.ib(default=42), "b": attr.ib(default=None)}
+ )
+
+ @attr.s
+ class C2(object):
+ a = attr.ib(default=42)
+ b = attr.ib(default=None)
+
+ assert C1.__attrs_attrs__ == C2.__attrs_attrs__
+
+ def test_attr_args(self):
+ """
+ attributes_arguments are passed through to `attr.s`.
+ """
+ C = make_class("C", ["x"], repr=False)
+
+ assert repr(C(1)).startswith("<tests.test_make.C object at 0x")
+
+ def test_catches_wrong_attrs_type(self):
+ """
+ Raise `TypeError` if an invalid type for attrs is passed.
+ """
+ with pytest.raises(TypeError) as e:
+ make_class("C", object())
+
+ assert ("attrs argument must be a dict or a list.",) == e.value.args
+
+ def test_bases(self):
+ """
+ The bases parameter defaults to (object,) and subclassing works correctly.
+ """
+
+ class D(object):
+ pass
+
+ cls = make_class("C", {})
+
+ assert cls.__mro__[-1] == object
+
+ cls = make_class("C", {}, bases=(D,))
+
+ assert D in cls.__mro__
+ assert isinstance(cls(), D)
+
+ @pytest.mark.parametrize("slots", [True, False])
+ def test_clean_class(self, slots):
+ """
+ Attribute definitions do not appear on the class body.
+ """
+ C = make_class("C", ["x"], slots=slots)
+
+ x = getattr(C, "x", None)
+
+ assert not isinstance(x, _CountingAttr)
+
+ def test_missing_sys_getframe(self, monkeypatch):
+ """
+ `make_class()` does not fail when `sys._getframe()` is not available.
+ """
+ monkeypatch.delattr(sys, "_getframe")
+ C = make_class("C", ["x"])
+
+ assert 1 == len(C.__attrs_attrs__)
+
+ def test_make_class_ordered(self):
+ """
+ If `make_class()` is passed ordered attrs, their order is respected
+ instead of the counter.
+ """
+ b = attr.ib(default=2)
+ a = attr.ib(default=1)
+
+ C = attr.make_class("C", ordered_dict([("a", a), ("b", b)]))
+
+ assert "C(a=1, b=2)" == repr(C())
+
+ @pytest.mark.skipif(PY2, reason="Python 3-only")
+ def test_generic_dynamic_class(self):
+ """
+ make_class can create generic dynamic classes.
+
+ https://github.com/python-attrs/attrs/issues/756
+ https://bugs.python.org/issue33188
+ """
+ from types import new_class
+ from typing import Generic, TypeVar
+
+ MyTypeVar = TypeVar("MyTypeVar")
+ MyParent = new_class("MyParent", (Generic[MyTypeVar],), {})
+
+ attr.make_class("test", {"id": attr.ib(type=str)}, (MyParent[int],))
+
+
+class TestFields(object):
+ """
+ Tests for `fields`.
+ """
+
+ @given(simple_classes())
+ def test_instance(self, C):
+ """
+ Raises `TypeError` on non-classes.
+ """
+ with pytest.raises(TypeError) as e:
+ fields(C())
+
+ assert "Passed object must be a class." == e.value.args[0]
+
+ def test_handler_non_attrs_class(self):
+ """
+ Raises `NotAnAttrsClassError` if passed a non-``attrs`` class.
+ """
+ with pytest.raises(NotAnAttrsClassError) as e:
+ fields(object)
+
+ assert (
+ "{o!r} is not an attrs-decorated class.".format(o=object)
+ ) == e.value.args[0]
+
+ @given(simple_classes())
+ def test_fields(self, C):
+ """
+ Returns a list of `Attribute`s.
+ """
+ assert all(isinstance(a, Attribute) for a in fields(C))
+
+ @given(simple_classes())
+ def test_fields_properties(self, C):
+ """
+ Fields returns a tuple with properties.
+ """
+ for attribute in fields(C):
+ assert getattr(fields(C), attribute.name) is attribute
+
+
+class TestFieldsDict(object):
+ """
+ Tests for `fields_dict`.
+ """
+
+ @given(simple_classes())
+ def test_instance(self, C):
+ """
+ Raises `TypeError` on non-classes.
+ """
+ with pytest.raises(TypeError) as e:
+ fields_dict(C())
+
+ assert "Passed object must be a class." == e.value.args[0]
+
+ def test_handler_non_attrs_class(self):
+ """
+ Raises `NotAnAttrsClassError` if passed a non-``attrs`` class.
+ """
+ with pytest.raises(NotAnAttrsClassError) as e:
+ fields_dict(object)
+
+ assert (
+ "{o!r} is not an attrs-decorated class.".format(o=object)
+ ) == e.value.args[0]
+
+ @given(simple_classes())
+ def test_fields_dict(self, C):
+ """
+ Returns an ordered dict of ``{attribute_name: Attribute}``.
+ """
+ d = fields_dict(C)
+
+ assert isinstance(d, ordered_dict)
+ assert list(fields(C)) == list(d.values())
+ assert [a.name for a in fields(C)] == [field_name for field_name in d]
+
+
+class TestConverter(object):
+ """
+ Tests for attribute conversion.
+ """
+
+ def test_convert(self):
+ """
+ Return value of converter is used as the attribute's value.
+ """
+ C = make_class(
+ "C", {"x": attr.ib(converter=lambda v: v + 1), "y": attr.ib()}
+ )
+ c = C(1, 2)
+
+ assert c.x == 2
+ assert c.y == 2
+
+ @given(integers(), booleans())
+ def test_convert_property(self, val, init):
+ """
+ Property tests for attributes using converter.
+ """
+ C = make_class(
+ "C",
+ {
+ "y": attr.ib(),
+ "x": attr.ib(
+ init=init, default=val, converter=lambda v: v + 1
+ ),
+ },
+ )
+ c = C(2)
+
+ assert c.x == val + 1
+ assert c.y == 2
+
+ @given(integers(), booleans())
+ def test_converter_factory_property(self, val, init):
+ """
+ Property tests for attributes with converter, and a factory default.
+ """
+ C = make_class(
+ "C",
+ ordered_dict(
+ [
+ ("y", attr.ib()),
+ (
+ "x",
+ attr.ib(
+ init=init,
+ default=Factory(lambda: val),
+ converter=lambda v: v + 1,
+ ),
+ ),
+ ]
+ ),
+ )
+ c = C(2)
+
+ assert c.x == val + 1
+ assert c.y == 2
+
+ def test_factory_takes_self(self):
+ """
+ If takes_self on factories is True, self is passed.
+ """
+ C = make_class(
+ "C",
+ {
+ "x": attr.ib(
+ default=Factory((lambda self: self), takes_self=True)
+ )
+ },
+ )
+
+ i = C()
+
+ assert i is i.x
+
+ def test_factory_hashable(self):
+ """
+ Factory is hashable.
+ """
+ assert hash(Factory(None, False)) == hash(Factory(None, False))
+
+ def test_convert_before_validate(self):
+ """
+ Validation happens after conversion.
+ """
+
+ def validator(inst, attr, val):
+ raise RuntimeError("foo")
+
+ C = make_class(
+ "C",
+ {
+ "x": attr.ib(validator=validator, converter=lambda v: 1 / 0),
+ "y": attr.ib(),
+ },
+ )
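+ # The converter's 1/0 raises before the validator gets a chance to,
+ # which shows that conversion happens first.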
+ with pytest.raises(ZeroDivisionError):
+ C(1, 2)
+
+ def test_frozen(self):
+ """
+ Converters circumvent immutability.
+ """
+ C = make_class(
+ "C", {"x": attr.ib(converter=lambda v: int(v))}, frozen=True
+ )
+ C("1")
+
+
+class TestValidate(object):
+ """
+ Tests for `validate`.
+ """
+
+ def test_success(self):
+ """
+ If the validator succeeds, nothing gets raised.
+ """
+ C = make_class(
+ "C", {"x": attr.ib(validator=lambda *a: None), "y": attr.ib()}
+ )
+ validate(C(1, 2))
+
+ def test_propagates(self):
+ """
+ The exception of the validator is handed through.
+ """
+
+ def raiser(_, __, value):
+ if value == 42:
+ raise FloatingPointError
+
+ C = make_class("C", {"x": attr.ib(validator=raiser)})
+ i = C(1)
+ i.x = 42
+
+ with pytest.raises(FloatingPointError):
+ validate(i)
+
+ def test_run_validators(self):
+ """
+ Setting `_run_validators` to False prevents validators from running.
+ """
+ _config._run_validators = False
+ obj = object()
+
+ def raiser(_, __, ___):
+ raise Exception(obj)
+
+ C = make_class("C", {"x": attr.ib(validator=raiser)})
+ c = C(1)
+ validate(c)
+ assert 1 == c.x
+ _config._run_validators = True
+
+ with pytest.raises(Exception):
+ validate(c)
+
+ with pytest.raises(Exception) as e:
+ C(1)
+ assert (obj,) == e.value.args
+
+ def test_multiple_validators(self):
+ """
+ If a list is passed as a validator, its items are combined into a single
+ validator and each of them must pass.
+ """
+
+ def v1(_, __, value):
+ if value == 23:
+ raise TypeError("omg")
+
+ def v2(_, __, value):
+ if value == 42:
+ raise ValueError("omg")
+
+ C = make_class("C", {"x": attr.ib(validator=[v1, v2])})
+
+ validate(C(1))
+
+ with pytest.raises(TypeError) as e:
+ C(23)
+
+ assert "omg" == e.value.args[0]
+
+ with pytest.raises(ValueError) as e:
+ C(42)
+
+ assert "omg" == e.value.args[0]
+
+ def test_multiple_empty(self):
+ """
+ Empty list/tuple for validator is the same as None.
+ """
+ C1 = make_class("C", {"x": attr.ib(validator=[])})
+ C2 = make_class("C", {"x": attr.ib(validator=None)})
+
+ assert inspect.getsource(C1.__init__) == inspect.getsource(C2.__init__)
+
+
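+# A minimal validator sketch, assuming `attr` and `pytest` are imported at the
+# top of this module as they are used elsewhere in this file: validators run
+# during __init__ and their exceptions propagate to the caller.
+@attr.s
+class _ValExample(object):
+    x = attr.ib(validator=attr.validators.instance_of(int))
+
+
+with pytest.raises(TypeError):
+    _ValExample("not an int")
+
+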
+# Hypothesis seems to cache values, so the lists of attributes come out
+# unsorted.
+sorted_lists_of_attrs = list_of_attrs.map(
+ lambda l: sorted(l, key=attrgetter("counter"))
+)
+
+
+class TestMetadata(object):
+ """
+ Tests for metadata handling.
+ """
+
+ @given(sorted_lists_of_attrs)
+ def test_metadata_present(self, list_of_attrs):
+ """
+ Metadata dictionaries are copied and present on the class's attributes.
+ """
+ C = make_class("C", dict(zip(gen_attr_names(), list_of_attrs)))
+
+ for hyp_attr, class_attr in zip(list_of_attrs, fields(C)):
+ if hyp_attr.metadata is None:
+ # The default is a singleton empty dict.
+ assert class_attr.metadata is not None
+ assert len(class_attr.metadata) == 0
+ else:
+ assert hyp_attr.metadata == class_attr.metadata
+
+ # Once more, just to assert getting items and iteration.
+ for k in class_attr.metadata:
+ assert hyp_attr.metadata[k] == class_attr.metadata[k]
+ assert hyp_attr.metadata.get(k) == class_attr.metadata.get(
+ k
+ )
+
+ @given(simple_classes(), text())
+ def test_metadata_immutability(self, C, string):
+ """
+ The metadata dict should be best-effort immutable.
+ """
+ for a in fields(C):
+ with pytest.raises(TypeError):
+ a.metadata[string] = string
+ with pytest.raises(AttributeError):
+ a.metadata.update({string: string})
+ with pytest.raises(AttributeError):
+ a.metadata.clear()
+ with pytest.raises(AttributeError):
+ a.metadata.setdefault(string, string)
+
+ for k in a.metadata:
+ # For some reason, Python 3's MappingProxyType throws an
+ # IndexError for deletes on a large integer key.
+ with pytest.raises((TypeError, IndexError)):
+ del a.metadata[k]
+ with pytest.raises(AttributeError):
+ a.metadata.pop(k)
+ with pytest.raises(AttributeError):
+ a.metadata.popitem()
+
+ @given(lists(simple_attrs_without_metadata, min_size=2, max_size=5))
+ def test_empty_metadata_singleton(self, list_of_attrs):
+ """
+ All empty metadata attributes share the same empty metadata dict.
+ """
+ C = make_class("C", dict(zip(gen_attr_names(), list_of_attrs)))
+ for a in fields(C)[1:]:
+ assert a.metadata is fields(C)[0].metadata
+
+ @given(lists(simple_attrs_without_metadata, min_size=2, max_size=5))
+ def test_empty_countingattr_metadata_independent(self, list_of_attrs):
+ """
+ All empty metadata attributes are independent before ``@attr.s``.
+ """
+ for x, y in itertools.combinations(list_of_attrs, 2):
+ assert x.metadata is not y.metadata
+
+ @given(lists(simple_attrs_with_metadata(), min_size=2, max_size=5))
+ def test_not_none_metadata(self, list_of_attrs):
+ """
+ Non-empty metadata attributes exist as fields after ``@attr.s``.
+ """
+ C = make_class("C", dict(zip(gen_attr_names(), list_of_attrs)))
+
+ assert len(fields(C)) > 0
+
+ for cls_a, raw_a in zip(fields(C), list_of_attrs):
+ assert cls_a.metadata != {}
+ assert cls_a.metadata == raw_a.metadata
+
+ def test_metadata(self):
+ """
+ If metadata that is not None is passed, it is used.
+
+ This is necessary for coverage because the previous test is
+ hypothesis-based.
+ """
+ md = {}
+ a = attr.ib(metadata=md)
+
+ assert md is a.metadata
+
+
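+# A minimal metadata sketch, assuming `attr` is imported at the top of this
+# module as it is used elsewhere in this file: metadata passed to attr.ib()
+# ends up on the corresponding Attribute as a read-only mapping.
+@attr.s
+class _MetaExample(object):
+    x = attr.ib(metadata={"doc": "example"})
+
+
+assert "example" == attr.fields(_MetaExample).x.metadata["doc"]
+
+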
+class TestClassBuilder(object):
+ """
+ Tests for `_ClassBuilder`.
+ """
+
+ def test_repr_str(self):
+ """
+ Trying to add a `__str__` without having a `__repr__` raises a
+ ValueError.
+ """
+ with pytest.raises(ValueError) as ei:
+ make_class("C", {}, repr=False, str=True)
+
+ assert (
+ "__str__ can only be generated if a __repr__ exists.",
+ ) == ei.value.args
+
+ def test_repr(self):
+ """
+ repr of builder itself makes sense.
+ """
+
+ class C(object):
+ pass
+
+ b = _ClassBuilder(
+ C,
+ None,
+ True,
+ True,
+ False,
+ False,
+ False,
+ False,
+ False,
+ False,
+ True,
+ None,
+ False,
+ None,
+ )
+
+ assert "<_ClassBuilder(cls=C)>" == repr(b)
+
+ def test_returns_self(self):
+ """
+ All methods return the builder for chaining.
+ """
+
+ class C(object):
+ x = attr.ib()
+
+ b = _ClassBuilder(
+ C,
+ None,
+ True,
+ True,
+ False,
+ False,
+ False,
+ False,
+ False,
+ False,
+ True,
+ None,
+ False,
+ None,
+ )
+
+ cls = (
+ b.add_eq()
+ .add_order()
+ .add_hash()
+ .add_init()
+ .add_attrs_init()
+ .add_repr("ns")
+ .add_str()
+ .build_class()
+ )
+
+ assert "ns.C(x=1)" == repr(cls(1))
+
+ @pytest.mark.parametrize(
+ "meth_name",
+ [
+ "__init__",
+ "__hash__",
+ "__repr__",
+ "__str__",
+ "__eq__",
+ "__ne__",
+ "__lt__",
+ "__le__",
+ "__gt__",
+ "__ge__",
+ ],
+ )
+ def test_attaches_meta_dunders(self, meth_name):
+ """
+ Generated methods have correct __module__, __name__, and __qualname__
+ attributes.
+ """
+
+ @attr.s(hash=True, str=True)
+ class C(object):
+ def organic(self):
+ pass
+
+ @attr.s(hash=True, str=True)
+ class D(object):
+ pass
+
+ meth_C = getattr(C, meth_name)
+ meth_D = getattr(D, meth_name)
+
+ assert meth_name == meth_C.__name__ == meth_D.__name__
+ assert C.organic.__module__ == meth_C.__module__ == meth_D.__module__
+ if not PY2:
+ # This assertion would fail if a single __ne__ instance were reused
+ # across multiple _make_eq calls.
+ organic_prefix = C.organic.__qualname__.rsplit(".", 1)[0]
+ assert organic_prefix + "." + meth_name == meth_C.__qualname__
+
+ def test_handles_missing_meta_on_class(self):
+ """
+ If the class doesn't have a __module__ or __qualname__, the method doesn't
+ either.
+ """
+
+ class C(object):
+ pass
+
+ b = _ClassBuilder(
+ C,
+ these=None,
+ slots=False,
+ frozen=False,
+ weakref_slot=True,
+ getstate_setstate=False,
+ auto_attribs=False,
+ is_exc=False,
+ kw_only=False,
+ cache_hash=False,
+ collect_by_mro=True,
+ on_setattr=None,
+ has_custom_setattr=False,
+ field_transformer=None,
+ )
+ b._cls = {} # no __module__; no __qualname__
+
+ def fake_meth(self):
+ pass
+
+ fake_meth.__module__ = "42"
+ fake_meth.__qualname__ = "23"
+
+ rv = b._add_method_dunders(fake_meth)
+
+ assert "42" == rv.__module__ == fake_meth.__module__
+ assert "23" == rv.__qualname__ == fake_meth.__qualname__
+
+ def test_weakref_setstate(self):
+ """
+ __weakref__ is not set in __setstate__ because it's not writable in
+ slotted classes.
+ """
+
+ @attr.s(slots=True)
+ class C(object):
+ __weakref__ = attr.ib(
+ init=False, hash=False, repr=False, eq=False, order=False
+ )
+
+ assert C() == copy.deepcopy(C())
+
+ def test_no_references_to_original(self):
+ """
+ When subclassing a slotted class, there are no stray references to the
+ original class.
+ """
+
+ @attr.s(slots=True)
+ class C(object):
+ pass
+
+ @attr.s(slots=True)
+ class C2(C):
+ pass
+
+ # The original C2 is in a reference cycle, so force a collect:
+ gc.collect()
+
+ assert [C2] == C.__subclasses__()
+
+ def _get_copy_kwargs(include_slots=True):
+ """
+ Generate a list of compatible attr.s arguments for the `copy` tests.
+ """
+ options = ["frozen", "hash", "cache_hash"]
+
+ if include_slots:
+ options.extend(["slots", "weakref_slot"])
+
+ out_kwargs = []
+ for args in itertools.product([True, False], repeat=len(options)):
+ kwargs = dict(zip(options, args))
+
+ kwargs["hash"] = kwargs["hash"] or None
+
+ if kwargs["cache_hash"] and not (
+ kwargs["frozen"] or kwargs["hash"]
+ ):
+ continue
+
+ out_kwargs.append(kwargs)
+
+ return out_kwargs
+
+ @pytest.mark.parametrize("kwargs", _get_copy_kwargs())
+ def test_copy(self, kwargs):
+ """
+ Ensure that an attrs class can be copied successfully.
+ """
+
+ @attr.s(eq=True, **kwargs)
+ class C(object):
+ x = attr.ib()
+
+ a = C(1)
+ b = copy.deepcopy(a)
+
+ assert a == b
+
+ @pytest.mark.parametrize("kwargs", _get_copy_kwargs(include_slots=False))
+ def test_copy_custom_setstate(self, kwargs):
+ """
+ Ensure that non-slots classes respect a custom __setstate__.
+ """
+
+ @attr.s(eq=True, **kwargs)
+ class C(object):
+ x = attr.ib()
+
+ def __getstate__(self):
+ return self.__dict__
+
+ def __setstate__(self, state):
+ state["x"] *= 5
+ self.__dict__.update(state)
+
+ expected = C(25)
+ actual = copy.copy(C(5))
+
+ assert actual == expected
+
+
+class TestMakeOrder:
+ """
+ Tests for _make_order().
+ """
+
+ def test_subclasses_cannot_be_compared(self):
+ """
+ Calling comparison methods on subclasses raises a TypeError.
+
+ We use the actual operation so we get an error raised on Python 3.
+ """
+
+ @attr.s
+ class A(object):
+ a = attr.ib()
+
+ @attr.s
+ class B(A):
+ pass
+
+ a = A(42)
+ b = B(42)
+
+ assert a <= a
+ assert a >= a
+ assert not a < a
+ assert not a > a
+
+ assert (
+ NotImplemented
+ == a.__lt__(b)
+ == a.__le__(b)
+ == a.__gt__(b)
+ == a.__ge__(b)
+ )
+
+ if not PY2:
+ with pytest.raises(TypeError):
+ a <= b
+
+ with pytest.raises(TypeError):
+ a >= b
+
+ with pytest.raises(TypeError):
+ a < b
+
+ with pytest.raises(TypeError):
+ a > b
+
+
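+# A minimal ordering sketch, assuming `attr` is imported at the top of this
+# module as it is used elsewhere in this file: with ordering enabled (the
+# @attr.s default), instances compare by their attribute values in definition
+# order.
+@attr.s
+class _OrderExample(object):
+    x = attr.ib()
+
+
+assert _OrderExample(1) < _OrderExample(2)
+
+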
+class TestDetermineAttrsEqOrder(object):
+ def test_default(self):
+ """
+ If all are set to None, set both eq and order to the passed default.
+ """
+ assert (42, 42) == _determine_attrs_eq_order(None, None, None, 42)
+
+ @pytest.mark.parametrize("eq", [True, False])
+ def test_order_mirrors_eq_by_default(self, eq):
+ """
+ If order is None, it mirrors eq.
+ """
+ assert (eq, eq) == _determine_attrs_eq_order(None, eq, None, True)
+
+ def test_order_without_eq(self):
+ """
+ eq=False, order=True raises a meaningful ValueError.
+ """
+ with pytest.raises(
+ ValueError, match="`order` can only be True if `eq` is True too."
+ ):
+ _determine_attrs_eq_order(None, False, True, True)
+
+ @given(cmp=booleans(), eq=optional_bool, order=optional_bool)
+ def test_mix(self, cmp, eq, order):
+ """
+ If cmp is not None, eq and order must be None and vice versa.
+ """
+ assume(eq is not None or order is not None)
+
+ with pytest.raises(
+ ValueError, match="Don't mix `cmp` with `eq' and `order`."
+ ):
+ _determine_attrs_eq_order(cmp, eq, order, True)
+
+
+class TestDetermineAttribEqOrder(object):
+ def test_default(self):
+ """
+ If all are set to None, set both eq and order to the passed default.
+ """
+ assert (42, None, 42, None) == _determine_attrib_eq_order(
+ None, None, None, 42
+ )
+
+ def test_eq_callable_order_boolean(self):
+ """
+ eq=callable or order=callable need to be transformed into eq/eq_key
+ or order/order_key.
+ """
+ assert (True, str.lower, False, None) == _determine_attrib_eq_order(
+ None, str.lower, False, True
+ )
+
+ def test_eq_callable_order_callable(self):
+ """
+ eq=callable or order=callable need to be transformed into eq/eq_key
+ or order/order_key.
+ """
+ assert (True, str.lower, True, abs) == _determine_attrib_eq_order(
+ None, str.lower, abs, True
+ )
+
+ def test_eq_boolean_order_callable(self):
+ """
+ eq=callable or order=callable need to be transformed into eq/eq_key
+ or order/order_key.
+ """
+ assert (True, None, True, str.lower) == _determine_attrib_eq_order(
+ None, True, str.lower, True
+ )
+
+ @pytest.mark.parametrize("eq", [True, False])
+ def test_order_mirrors_eq_by_default(self, eq):
+ """
+ If order is None, it mirrors eq.
+ """
+ assert (eq, None, eq, None) == _determine_attrib_eq_order(
+ None, eq, None, True
+ )
+
+ def test_order_without_eq(self):
+ """
+ eq=False, order=True raises a meaningful ValueError.
+ """
+ with pytest.raises(
+ ValueError, match="`order` can only be True if `eq` is True too."
+ ):
+ _determine_attrib_eq_order(None, False, True, True)
+
+ @given(cmp=booleans(), eq=optional_bool, order=optional_bool)
+ def test_mix(self, cmp, eq, order):
+ """
+ If cmp is not None, eq and order must be None and vice versa.
+ """
+ assume(eq is not None or order is not None)
+
+ with pytest.raises(
+ ValueError, match="Don't mix `cmp` with `eq' and `order`."
+ ):
+ _determine_attrib_eq_order(cmp, eq, order, True)
+
+
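+# A minimal sketch of passing a callable as `eq`, assuming `attr` is imported
+# at the top of this module as it is used elsewhere in this file: the callable
+# becomes the field's eq_key, so equality compares the transformed values.
+@attr.s
+class _EqKeyExample(object):
+    name = attr.ib(eq=str.lower)
+
+
+assert _EqKeyExample("ATTRS") == _EqKeyExample("attrs")
+
+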
+class TestDocs:
+ @pytest.mark.parametrize(
+ "meth_name",
+ [
+ "__init__",
+ "__repr__",
+ "__eq__",
+ "__ne__",
+ "__lt__",
+ "__le__",
+ "__gt__",
+ "__ge__",
+ ],
+ )
+ def test_docs(self, meth_name):
+ """
+ Tests the presence and correctness of the documentation for the generated methods.
+ """
+
+ @attr.s
+ class A(object):
+ pass
+
+ if hasattr(A, "__qualname__"):
+ method = getattr(A, meth_name)
+ expected = "Method generated by attrs for class {}.".format(
+ A.__qualname__
+ )
+ assert expected == method.__doc__
+
+
+@pytest.mark.skipif(not PY2, reason="Needs to be only caught on Python 2.")
+def test_auto_detect_raises_on_py2():
+ """
+ Trying to pass auto_detect=True to attr.s raises PythonTooOldError.
+ """
+ with pytest.raises(PythonTooOldError):
+ attr.s(auto_detect=True)
+
+
+class BareC(object):
+ pass
+
+
+class BareSlottedC(object):
+ __slots__ = ()
+
+
+@pytest.mark.skipif(PY2, reason="Auto-detection is Python 3-only.")
+class TestAutoDetect:
+ @pytest.mark.parametrize("C", (BareC, BareSlottedC))
+ def test_determine_detects_non_presence_correctly(self, C):
+ """
+ On an empty class, nothing should be detected.
+ """
+ assert True is _determine_whether_to_implement(
+ C, None, True, ("__init__",)
+ )
+ assert True is _determine_whether_to_implement(
+ C, None, True, ("__repr__",)
+ )
+ assert True is _determine_whether_to_implement(
+ C, None, True, ("__eq__", "__ne__")
+ )
+ assert True is _determine_whether_to_implement(
+ C, None, True, ("__le__", "__lt__", "__ge__", "__gt__")
+ )
+
+ @pytest.mark.parametrize("slots", [True, False])
+ @pytest.mark.parametrize("frozen", [True, False])
+ def test_make_all_by_default(self, slots, frozen):
+ """
+ If nothing is there to be detected, imply init=True, repr=True,
+ hash=None, eq=True, order=True.
+ """
+
+ @attr.s(auto_detect=True, slots=slots, frozen=frozen)
+ class C(object):
+ x = attr.ib()
+
+ i = C(1)
+ o = object()
+
+ assert i.__init__ is not o.__init__
+ assert i.__repr__ is not o.__repr__
+ assert i.__eq__ is not o.__eq__
+ assert i.__ne__ is not o.__ne__
+ assert i.__le__ is not o.__le__
+ assert i.__lt__ is not o.__lt__
+ assert i.__ge__ is not o.__ge__
+ assert i.__gt__ is not o.__gt__
+
+ @pytest.mark.parametrize("slots", [True, False])
+ @pytest.mark.parametrize("frozen", [True, False])
+ def test_detect_auto_init(self, slots, frozen):
+ """
+ If auto_detect=True and an __init__ exists, don't write one.
+ """
+
+ @attr.s(auto_detect=True, slots=slots, frozen=frozen)
+ class CI(object):
+ x = attr.ib()
+
+ def __init__(self):
+ object.__setattr__(self, "x", 42)
+
+ assert 42 == CI().x
+
+ @pytest.mark.parametrize("slots", [True, False])
+ @pytest.mark.parametrize("frozen", [True, False])
+ def test_detect_auto_repr(self, slots, frozen):
+ """
+ If auto_detect=True and a __repr__ exists, don't write one.
+ """
+
+ @attr.s(auto_detect=True, slots=slots, frozen=frozen)
+ class C(object):
+ x = attr.ib()
+
+ def __repr__(self):
+ return "hi"
+
+ assert "hi" == repr(C(42))
+
+ @pytest.mark.parametrize("slots", [True, False])
+ @pytest.mark.parametrize("frozen", [True, False])
+ def test_detect_auto_hash(self, slots, frozen):
+ """
+ If auto_detect=True and a __hash__ exists, don't write one.
+ """
+
+ @attr.s(auto_detect=True, slots=slots, frozen=frozen)
+ class C(object):
+ x = attr.ib()
+
+ def __hash__(self):
+ return 0xC0FFEE
+
+ assert 0xC0FFEE == hash(C(42))
+
+ @pytest.mark.parametrize("slots", [True, False])
+ @pytest.mark.parametrize("frozen", [True, False])
+ def test_detect_auto_eq(self, slots, frozen):
+ """
+ If auto_detect=True and an __eq__ or an __ne__ exists, don't write one.
+ """
+
+ @attr.s(auto_detect=True, slots=slots, frozen=frozen)
+ class C(object):
+ x = attr.ib()
+
+ def __eq__(self, o):
+ raise ValueError("worked")
+
+ with pytest.raises(ValueError, match="worked"):
+ C(1) == C(1)
+
+ @attr.s(auto_detect=True, slots=slots, frozen=frozen)
+ class D(object):
+ x = attr.ib()
+
+ def __ne__(self, o):
+ raise ValueError("worked")
+
+ with pytest.raises(ValueError, match="worked"):
+ D(1) != D(1)
+
+ @pytest.mark.parametrize("slots", [True, False])
+ @pytest.mark.parametrize("frozen", [True, False])
+ def test_detect_auto_order(self, slots, frozen):
+ """
+ If auto_detect=True and a __ge__, __gt__, __le__, or __lt__ exists,
+ don't write one.
+
+ It's surprisingly difficult to test this programmatically, so we do it
+ by hand.
+ """
+
+ def assert_not_set(cls, ex, meth_name):
+ __tracebackhide__ = True
+
+ a = getattr(cls, meth_name)
+ if meth_name == ex:
+ assert a == 42
+ else:
+ assert a is getattr(object, meth_name)
+
+ def assert_none_set(cls, ex):
+ __tracebackhide__ = True
+
+ for m in ("le", "lt", "ge", "gt"):
+ assert_not_set(cls, ex, "__" + m + "__")
+
+ @attr.s(auto_detect=True, slots=slots, frozen=frozen)
+ class LE(object):
+ __le__ = 42
+
+ @attr.s(auto_detect=True, slots=slots, frozen=frozen)
+ class LT(object):
+ __lt__ = 42
+
+ @attr.s(auto_detect=True, slots=slots, frozen=frozen)
+ class GE(object):
+ __ge__ = 42
+
+ @attr.s(auto_detect=True, slots=slots, frozen=frozen)
+ class GT(object):
+ __gt__ = 42
+
+ assert_none_set(LE, "__le__")
+ assert_none_set(LT, "__lt__")
+ assert_none_set(GE, "__ge__")
+ assert_none_set(GT, "__gt__")
+
+ @pytest.mark.parametrize("slots", [True, False])
+ @pytest.mark.parametrize("frozen", [True, False])
+ def test_override_init(self, slots, frozen):
+ """
+ If init=True is passed, ignore __init__.
+ """
+
+ @attr.s(init=True, auto_detect=True, slots=slots, frozen=frozen)
+ class C(object):
+ x = attr.ib()
+
+ def __init__(self):
+ pytest.fail("should not be called")
+
+ assert C(1) == C(1)
+
+ @pytest.mark.parametrize("slots", [True, False])
+ @pytest.mark.parametrize("frozen", [True, False])
+ def test_override_repr(self, slots, frozen):
+ """
+ If repr=True is passed, ignore __repr__.
+ """
+
+ @attr.s(repr=True, auto_detect=True, slots=slots, frozen=frozen)
+ class C(object):
+ x = attr.ib()
+
+ def __repr__(self):
+ pytest.fail("should not be called")
+
+ assert "C(x=1)" == repr(C(1))
+
+ @pytest.mark.parametrize("slots", [True, False])
+ @pytest.mark.parametrize("frozen", [True, False])
+ def test_override_hash(self, slots, frozen):
+ """
+ If hash=True is passed, ignore __hash__.
+ """
+
+ @attr.s(hash=True, auto_detect=True, slots=slots, frozen=frozen)
+ class C(object):
+ x = attr.ib()
+
+ def __hash__(self):
+ pytest.fail("should not be called")
+
+ assert hash(C(1))
+
+ @pytest.mark.parametrize("slots", [True, False])
+ @pytest.mark.parametrize("frozen", [True, False])
+ def test_override_eq(self, slots, frozen):
+ """
+ If eq=True is passed, ignore __eq__ and __ne__.
+ """
+
+ @attr.s(eq=True, auto_detect=True, slots=slots, frozen=frozen)
+ class C(object):
+ x = attr.ib()
+
+ def __eq__(self, o):
+ pytest.fail("should not be called")
+
+ def __ne__(self, o):
+ pytest.fail("should not be called")
+
+ assert C(1) == C(1)
+
+ @pytest.mark.parametrize("slots", [True, False])
+ @pytest.mark.parametrize("frozen", [True, False])
+ @pytest.mark.parametrize(
+ "eq,order,cmp",
+ [
+ (True, None, None),
+ (True, True, None),
+ (None, True, None),
+ (None, None, True),
+ ],
+ )
+ def test_override_order(self, slots, frozen, eq, order, cmp):
+ """
+ If order=True is passed, ignore __le__, __lt__, __gt__, __ge__.
+
+ eq=True and cmp=True both imply order=True so test it too.
+ """
+
+ def meth(self, o):
+ pytest.fail("should not be called")
+
+ @attr.s(
+ cmp=cmp,
+ order=order,
+ eq=eq,
+ auto_detect=True,
+ slots=slots,
+ frozen=frozen,
+ )
+ class C(object):
+ x = attr.ib()
+ __le__ = __lt__ = __gt__ = __ge__ = meth
+
+ assert C(1) < C(2)
+ assert C(1) <= C(2)
+ assert C(2) > C(1)
+ assert C(2) >= C(1)
+
+ @pytest.mark.parametrize("slots", [True, False])
+ @pytest.mark.parametrize("first", [True, False])
+ def test_total_ordering(self, slots, first):
+ """
+ functools.total_ordering works as expected if an order method and an eq
+ method are detected.
+
+ Ensure the decoration order doesn't matter.
+ """
+
+ class C(object):
+ x = attr.ib()
+ own_eq_called = attr.ib(default=False)
+ own_le_called = attr.ib(default=False)
+
+ def __eq__(self, o):
+ self.own_eq_called = True
+ return self.x == o.x
+
+ def __le__(self, o):
+ self.own_le_called = True
+ return self.x <= o.x
+
+ if first:
+ C = functools.total_ordering(
+ attr.s(auto_detect=True, slots=slots)(C)
+ )
+ else:
+ C = attr.s(auto_detect=True, slots=slots)(
+ functools.total_ordering(C)
+ )
+
+ c1, c2 = C(1), C(2)
+
+ assert c1 < c2
+ assert c1.own_le_called
+
+ c1, c2 = C(1), C(2)
+
+ assert c2 > c1
+ assert c2.own_le_called
+
+ c1, c2 = C(1), C(2)
+
+ assert c2 != c1
+ assert c1 == c1
+
+ assert c1.own_eq_called
+
+ @pytest.mark.parametrize("slots", [True, False])
+ def test_detects_setstate_getstate(self, slots):
+ """
+ __getstate__ and __setstate__ are not overwritten if either is present.
+ """
+
+ @attr.s(slots=slots, auto_detect=True)
+ class C(object):
+ def __getstate__(self):
+ return ("hi",)
+
+ assert None is getattr(C(), "__setstate__", None)
+
+ @attr.s(slots=slots, auto_detect=True)
+ class C(object):
+ called = attr.ib(False)
+
+ def __setstate__(self, state):
+ self.called = True
+
+ i = C()
+
+ assert False is i.called
+
+ i.__setstate__(())
+
+ assert True is i.called
+ assert None is getattr(C(), "__getstate__", None)
+
+ @pytest.mark.skipif(PY310, reason="Pre-3.10 only.")
+ def test_match_args_pre_310(self):
+ """
+ __match_args__ is not created on Python versions older than 3.10.
+ """
+
+ @attr.s
+ class C(object):
+ a = attr.ib()
+
+ assert None is getattr(C, "__match_args__", None)
+
+
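+# A minimal auto_detect sketch, assuming `attr` and the `PY2` flag are imported
+# at the top of this module as they are used elsewhere in this file: with
+# auto_detect=True, attrs keeps a __repr__ the class defines itself instead of
+# generating one.
+if not PY2:
+
+    @attr.s(auto_detect=True)
+    class _AutoDetectExample(object):
+        x = attr.ib()
+
+        def __repr__(self):
+            return "custom"
+
+    assert "custom" == repr(_AutoDetectExample(1))
+
+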
+@pytest.mark.skipif(not PY310, reason="Structural pattern matching is 3.10+")
+class TestMatchArgs(object):
+ """
+ Tests for match_args and __match_args__ generation.
+ """
+
+ def test_match_args(self):
+ """
+ __match_args__ is created by default on Python 3.10.
+ """
+
+ @attr.define
+ class C:
+ a = attr.field()
+
+ assert ("a",) == C.__match_args__
+
+ def test_explicit_match_args(self):
+ """
+ A __match_args__ that is set explicitly is not overwritten.
+ """
+
+ ma = ()
+
+ @attr.define
+ class C:
+ a = attr.field()
+ __match_args__ = ma
+
+ assert C(42).__match_args__ is ma
+
+ @pytest.mark.parametrize("match_args", [True, False])
+ def test_match_args_attr_set(self, match_args):
+ """
+ __match_args__ is set depending on match_args.
+ """
+
+ @attr.define(match_args=match_args)
+ class C:
+ a = attr.field()
+
+ if match_args:
+ assert hasattr(C, "__match_args__")
+ else:
+ assert not hasattr(C, "__match_args__")
+
+ def test_match_args_kw_only(self):
+ """
+ kw_only classes don't generate __match_args__.
+ kw_only fields are not included in __match_args__.
+ """
+
+ @attr.define
+ class C:
+ a = attr.field(kw_only=True)
+ b = attr.field()
+
+ assert C.__match_args__ == ("b",)
+
+ @attr.define(kw_only=True)
+ class C:
+ a = attr.field()
+ b = attr.field()
+
+ assert C.__match_args__ == ()
+
+ def test_match_args_argument(self):
+ """
+ match_args=False works correctly together with inheritance.
+ """
+
+ @attr.define(match_args=False)
+ class X:
+ a = attr.field()
+
+ assert "__match_args__" not in X.__dict__
+
+ @attr.define(match_args=False)
+ class Y:
+ a = attr.field()
+ __match_args__ = ("b",)
+
+ assert Y.__match_args__ == ("b",)
+
+ @attr.define(match_args=False)
+ class Z(Y):
+ z = attr.field()
+
+ assert Z.__match_args__ == ("b",)
+
+ @attr.define
+ class A:
+ a = attr.field()
+ z = attr.field()
+
+ @attr.define(match_args=False)
+ class B(A):
+ b = attr.field()
+
+ assert B.__match_args__ == ("a", "z")
+
+ def test_make_class(self):
+ """
+ match_args generation with make_class.
+ """
+
+ C1 = make_class("C1", ["a", "b"])
+ assert ("a", "b") == C1.__match_args__
+
+ C1 = make_class("C1", ["a", "b"], match_args=False)
+ assert not hasattr(C1, "__match_args__")
+
+ C1 = make_class("C1", ["a", "b"], kw_only=True)
+ assert () == C1.__match_args__
+
+ C1 = make_class("C1", {"a": attr.ib(kw_only=True), "b": attr.ib()})
+ assert ("b",) == C1.__match_args__
diff --git a/testing/web-platform/tests/tools/third_party/attrs/tests/test_mypy.yml b/testing/web-platform/tests/tools/third_party/attrs/tests/test_mypy.yml
new file mode 100644
index 0000000000..ca17b0a662
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/tests/test_mypy.yml
@@ -0,0 +1,1395 @@
+- case: attr_s_with_type_argument
+ parametrized:
+ - val: 'a = attr.ib(type=int)'
+ - val: 'a: int = attr.ib()'
+ main: |
+ import attr
+ @attr.s
+ class C:
+ {{ val }}
+ C() # E: Missing positional argument "a" in call to "C"
+ C(1)
+ C(a=1)
+ C(a="hi") # E: Argument "a" to "C" has incompatible type "str"; expected "int"
+- case: attr_s_with_type_annotations
+ main: |
+ import attr
+ @attr.s
+ class C:
+ a: int = attr.ib()
+ C() # E: Missing positional argument "a" in call to "C"
+ C(1)
+ C(a=1)
+ C(a="hi") # E: Argument "a" to "C" has incompatible type "str"; expected "int"
+
+- case: testAttrsSimple
+ main: |
+ import attr
+ @attr.s
+ class A:
+ a = attr.ib()
+ _b = attr.ib()
+ c = attr.ib(18)
+ _d = attr.ib(validator=None, default=18)
+ E = 18
+
+ def foo(self):
+ return self.a
+ reveal_type(A) # N: Revealed type is "def (a: Any, b: Any, c: Any =, d: Any =) -> main.A"
+ A(1, [2])
+ A(1, [2], '3', 4)
+ A(1, 2, 3, 4)
+ A(1, [2], '3', 4, 5) # E: Too many arguments for "A"
+
+- case: testAttrsAnnotated
+ main: |
+ import attr
+ from typing import List, ClassVar
+ @attr.s
+ class A:
+ a: int = attr.ib()
+ _b: List[int] = attr.ib()
+ c: str = attr.ib('18')
+ _d: int = attr.ib(validator=None, default=18)
+ E = 7
+ F: ClassVar[int] = 22
+ reveal_type(A) # N: Revealed type is "def (a: builtins.int, b: builtins.list[builtins.int], c: builtins.str =, d: builtins.int =) -> main.A"
+ A(1, [2])
+ A(1, [2], '3', 4)
+ A(1, 2, 3, 4) # E: Argument 2 to "A" has incompatible type "int"; expected "List[int]" # E: Argument 3 to "A" has incompatible type "int"; expected "str"
+ A(1, [2], '3', 4, 5) # E: Too many arguments for "A"
+
+- case: testAttrsPython2Annotations
+ main: |
+ import attr
+ from typing import List, ClassVar
+ @attr.s
+ class A:
+ a = attr.ib() # type: int
+ _b = attr.ib() # type: List[int]
+ c = attr.ib('18') # type: str
+ _d = attr.ib(validator=None, default=18) # type: int
+ E = 7
+ F: ClassVar[int] = 22
+ reveal_type(A) # N: Revealed type is "def (a: builtins.int, b: builtins.list[builtins.int], c: builtins.str =, d: builtins.int =) -> main.A"
+ A(1, [2])
+ A(1, [2], '3', 4)
+ A(1, 2, 3, 4) # E: Argument 2 to "A" has incompatible type "int"; expected "List[int]" # E: Argument 3 to "A" has incompatible type "int"; expected "str"
+ A(1, [2], '3', 4, 5) # E: Too many arguments for "A"
+
+- case: testAttrsAutoAttribs
+ main: |
+ import attr
+ from typing import List, ClassVar
+ @attr.s(auto_attribs=True)
+ class A:
+ a: int
+ _b: List[int]
+ c: str = '18'
+ _d: int = attr.ib(validator=None, default=18)
+ E = 7
+ F: ClassVar[int] = 22
+ reveal_type(A) # N: Revealed type is "def (a: builtins.int, b: builtins.list[builtins.int], c: builtins.str =, d: builtins.int =) -> main.A"
+ A(1, [2])
+ A(1, [2], '3', 4)
+ A(1, 2, 3, 4) # E: Argument 2 to "A" has incompatible type "int"; expected "List[int]" # E: Argument 3 to "A" has incompatible type "int"; expected "str"
+ A(1, [2], '3', 4, 5) # E: Too many arguments for "A"
+
+- case: testAttrsUntypedNoUntypedDefs
+ mypy_config: |
+ disallow_untyped_defs = True
+ main: |
+ import attr
+ @attr.s
+ class A:
+ a = attr.ib() # E: Need type annotation for "a"
+ _b = attr.ib() # E: Need type annotation for "_b"
+ c = attr.ib(18) # E: Need type annotation for "c"
+ _d = attr.ib(validator=None, default=18) # E: Need type annotation for "_d"
+ E = 18
+
+- case: testAttrsWrongReturnValue
+ main: |
+ import attr
+ @attr.s
+ class A:
+ x: int = attr.ib(8)
+ def foo(self) -> str:
+ return self.x # E: Incompatible return value type (got "int", expected "str")
+ @attr.s
+ class B:
+ x = attr.ib(8) # type: int
+ def foo(self) -> str:
+ return self.x # E: Incompatible return value type (got "int", expected "str")
+ @attr.dataclass
+ class C:
+ x: int = 8
+ def foo(self) -> str:
+ return self.x # E: Incompatible return value type (got "int", expected "str")
+ @attr.s
+ class D:
+ x = attr.ib(8, type=int)
+ def foo(self) -> str:
+ return self.x # E: Incompatible return value type (got "int", expected "str")
+
+- case: testAttrsSeriousNames
+ main: |
+ from attr import attrib, attrs
+ from typing import List
+ @attrs(init=True)
+ class A:
+ a = attrib()
+ _b: List[int] = attrib()
+ c = attrib(18)
+ _d = attrib(validator=None, default=18)
+ CLASS_VAR = 18
+ reveal_type(A) # N: Revealed type is "def (a: Any, b: builtins.list[builtins.int], c: Any =, d: Any =) -> main.A"
+ A(1, [2])
+ A(1, [2], '3', 4)
+ A(1, 2, 3, 4) # E: Argument 2 to "A" has incompatible type "int"; expected "List[int]"
+ A(1, [2], '3', 4, 5) # E: Too many arguments for "A"
+
+- case: testAttrsDefaultErrors
+ main: |
+ import attr
+ @attr.s
+ class A:
+ x = attr.ib(default=17)
+ y = attr.ib() # E: Non-default attributes not allowed after default attributes.
+ @attr.s(auto_attribs=True)
+ class B:
+ x: int = 17
+ y: int # E: Non-default attributes not allowed after default attributes.
+ @attr.s(auto_attribs=True)
+ class C:
+ x: int = attr.ib(default=17)
+ y: int # E: Non-default attributes not allowed after default attributes.
+ @attr.s
+ class D:
+ x = attr.ib()
+ y = attr.ib() # E: Non-default attributes not allowed after default attributes.
+
+ @x.default
+ def foo(self):
+ return 17
+
+- case: testAttrsNotBooleans
+ main: |
+ import attr
+ x = True
+ @attr.s(cmp=x) # E: "cmp" argument must be True or False.
+ class A:
+ a = attr.ib(init=x) # E: "init" argument must be True or False.
+
+- case: testAttrsInitFalse
+ main: |
+ from attr import attrib, attrs
+ @attrs(auto_attribs=True, init=False)
+ class A:
+ a: int
+ _b: int
+ c: int = 18
+ _d: int = attrib(validator=None, default=18)
+ reveal_type(A) # N: Revealed type is "def () -> main.A"
+ A()
+ A(1, [2]) # E: Too many arguments for "A"
+ A(1, [2], '3', 4) # E: Too many arguments for "A"
+
+- case: testAttrsInitAttribFalse
+ main: |
+ from attr import attrib, attrs
+ @attrs
+ class A:
+ a = attrib(init=False)
+ b = attrib()
+ reveal_type(A) # N: Revealed type is "def (b: Any) -> main.A"
+
+- case: testAttrsCmpTrue
+ main: |
+ from attr import attrib, attrs
+ @attrs(auto_attribs=True)
+ class A:
+ a: int
+ reveal_type(A) # N: Revealed type is "def (a: builtins.int) -> main.A"
+ reveal_type(A.__lt__) # N: Revealed type is "def [_AT] (self: _AT`-1, other: _AT`-1) -> builtins.bool"
+ reveal_type(A.__le__) # N: Revealed type is "def [_AT] (self: _AT`-1, other: _AT`-1) -> builtins.bool"
+ reveal_type(A.__gt__) # N: Revealed type is "def [_AT] (self: _AT`-1, other: _AT`-1) -> builtins.bool"
+ reveal_type(A.__ge__) # N: Revealed type is "def [_AT] (self: _AT`-1, other: _AT`-1) -> builtins.bool"
+
+ A(1) < A(2)
+ A(1) <= A(2)
+ A(1) > A(2)
+ A(1) >= A(2)
+ A(1) == A(2)
+ A(1) != A(2)
+
+ A(1) < 1 # E: Unsupported operand types for < ("A" and "int")
+ A(1) <= 1 # E: Unsupported operand types for <= ("A" and "int")
+ A(1) > 1 # E: Unsupported operand types for > ("A" and "int")
+ A(1) >= 1 # E: Unsupported operand types for >= ("A" and "int")
+ A(1) == 1
+ A(1) != 1
+
+ 1 < A(1) # E: Unsupported operand types for < ("int" and "A")
+ 1 <= A(1) # E: Unsupported operand types for <= ("int" and "A")
+ 1 > A(1) # E: Unsupported operand types for > ("int" and "A")
+ 1 >= A(1) # E: Unsupported operand types for >= ("int" and "A")
+ 1 == A(1)
+ 1 != A(1)
+
+- case: testAttrsEqFalse
+ main: |
+ from attr import attrib, attrs
+ @attrs(auto_attribs=True, eq=False)
+ class A:
+ a: int
+ reveal_type(A) # N: Revealed type is "def (a: builtins.int) -> main.A"
+ reveal_type(A.__eq__) # N: Revealed type is "def (builtins.object, builtins.object) -> builtins.bool"
+ reveal_type(A.__ne__) # N: Revealed type is "def (builtins.object, builtins.object) -> builtins.bool"
+
+ A(1) < A(2) # E: Unsupported left operand type for < ("A")
+ A(1) <= A(2) # E: Unsupported left operand type for <= ("A")
+ A(1) > A(2) # E: Unsupported left operand type for > ("A")
+ A(1) >= A(2) # E: Unsupported left operand type for >= ("A")
+ A(1) == A(2)
+ A(1) != A(2)
+
+ A(1) < 1 # E: Unsupported operand types for > ("int" and "A")
+ A(1) <= 1 # E: Unsupported operand types for >= ("int" and "A")
+ A(1) > 1 # E: Unsupported operand types for < ("int" and "A")
+ A(1) >= 1 # E: Unsupported operand types for <= ("int" and "A")
+ A(1) == 1
+ A(1) != 1
+
+ 1 < A(1) # E: Unsupported operand types for < ("int" and "A")
+ 1 <= A(1) # E: Unsupported operand types for <= ("int" and "A")
+ 1 > A(1) # E: Unsupported operand types for > ("int" and "A")
+ 1 >= A(1) # E: Unsupported operand types for >= ("int" and "A")
+ 1 == A(1)
+ 1 != A(1)
+
+- case: testAttrsOrderFalse
+ main: |
+ from attr import attrib, attrs
+ @attrs(auto_attribs=True, order=False)
+ class A:
+ a: int
+ reveal_type(A) # N: Revealed type is "def (a: builtins.int) -> main.A"
+
+ A(1) < A(2) # E: Unsupported left operand type for < ("A")
+ A(1) <= A(2) # E: Unsupported left operand type for <= ("A")
+ A(1) > A(2) # E: Unsupported left operand type for > ("A")
+ A(1) >= A(2) # E: Unsupported left operand type for >= ("A")
+ A(1) == A(2)
+ A(1) != A(2)
+
+ A(1) < 1 # E: Unsupported operand types for > ("int" and "A")
+ A(1) <= 1 # E: Unsupported operand types for >= ("int" and "A")
+ A(1) > 1 # E: Unsupported operand types for < ("int" and "A")
+ A(1) >= 1 # E: Unsupported operand types for <= ("int" and "A")
+ A(1) == 1
+ A(1) != 1
+
+ 1 < A(1) # E: Unsupported operand types for < ("int" and "A")
+ 1 <= A(1) # E: Unsupported operand types for <= ("int" and "A")
+ 1 > A(1) # E: Unsupported operand types for > ("int" and "A")
+ 1 >= A(1) # E: Unsupported operand types for >= ("int" and "A")
+ 1 == A(1)
+ 1 != A(1)
+
+- case: testAttrsCmpEqOrderValues
+ main: |
+ from attr import attrib, attrs
+ @attrs(cmp=True)
+ class DeprecatedTrue:
+ ...
+
+ @attrs(cmp=False)
+ class DeprecatedFalse:
+ ...
+
+ @attrs(cmp=False, eq=True) # E: Don't mix "cmp" with "eq" and "order"
+ class Mixed:
+ ...
+
+ @attrs(order=True, eq=False) # E: eq must be True if order is True
+ class Confused:
+ ...
+
+
+- case: testAttrsInheritance
+ main: |
+ import attr
+ @attr.s
+ class A:
+ a: int = attr.ib()
+ @attr.s
+ class B:
+ b: str = attr.ib()
+ @attr.s
+ class C(A, B):
+ c: bool = attr.ib()
+ reveal_type(C) # N: Revealed type is "def (a: builtins.int, b: builtins.str, c: builtins.bool) -> main.C"
+
+- case: testAttrsNestedInClasses
+ main: |
+ import attr
+ @attr.s
+ class C:
+ y = attr.ib()
+ @attr.s
+ class D:
+ x: int = attr.ib()
+ reveal_type(C) # N: Revealed type is "def (y: Any) -> main.C"
+ reveal_type(C.D) # N: Revealed type is "def (x: builtins.int) -> main.C.D"
+
+- case: testAttrsInheritanceOverride
+ main: |
+ import attr
+
+ @attr.s
+ class A:
+ a: int = attr.ib()
+ x: int = attr.ib()
+
+ @attr.s
+ class B(A):
+ b: str = attr.ib()
+ x: int = attr.ib(default=22)
+
+ @attr.s
+ class C(B):
+ c: bool = attr.ib() # No error here because the x below overwrites the x above.
+ x: int = attr.ib()
+
+ reveal_type(A) # N: Revealed type is "def (a: builtins.int, x: builtins.int) -> main.A"
+ reveal_type(B) # N: Revealed type is "def (a: builtins.int, b: builtins.str, x: builtins.int =) -> main.B"
+ reveal_type(C) # N: Revealed type is "def (a: builtins.int, b: builtins.str, c: builtins.bool, x: builtins.int) -> main.C"
+
+- case: testAttrsTypeEquals
+ main: |
+ import attr
+
+ @attr.s
+ class A:
+ a = attr.ib(type=int)
+ b = attr.ib(18, type=int)
+ reveal_type(A) # N: Revealed type is "def (a: builtins.int, b: builtins.int =) -> main.A"
+
+- case: testAttrsFrozen
+ main: |
+ import attr
+
+ @attr.s(frozen=True)
+ class A:
+ a = attr.ib()
+
+ a = A(5)
+ a.a = 16 # E: Property "a" defined in "A" is read-only
+- case: testAttrsNextGenFrozen
+ main: |
+ from attr import frozen, field
+
+ @frozen
+ class A:
+ a = field()
+
+ a = A(5)
+ a.a = 16 # E: Property "a" defined in "A" is read-only
+
+- case: testAttrsNextGenDetect
+ main: |
+ from attr import define, field
+
+ @define
+ class A:
+ a = field()
+
+ @define
+ class B:
+ a: int
+
+ @define
+ class C:
+ a: int = field()
+ b = field()
+
+ @define
+ class D:
+ a: int
+ b = field()
+
+ # TODO: Next Gen hasn't shipped with mypy yet so the following are wrong
+ reveal_type(A) # N: Revealed type is "def (a: Any) -> main.A"
+ reveal_type(B) # N: Revealed type is "def (a: builtins.int) -> main.B"
+ reveal_type(C) # N: Revealed type is "def (a: builtins.int, b: Any) -> main.C"
+ reveal_type(D) # N: Revealed type is "def (b: Any) -> main.D"
+
+- case: testAttrsDataClass
+ main: |
+ import attr
+ from typing import List, ClassVar
+ @attr.dataclass
+ class A:
+ a: int
+ _b: List[str]
+ c: str = '18'
+ _d: int = attr.ib(validator=None, default=18)
+ E = 7
+ F: ClassVar[int] = 22
+ reveal_type(A) # N: Revealed type is "def (a: builtins.int, b: builtins.list[builtins.str], c: builtins.str =, d: builtins.int =) -> main.A"
+ A(1, ['2'])
+
+- case: testAttrsTypeAlias
+ main: |
+ from typing import List
+ import attr
+ Alias = List[int]
+ @attr.s(auto_attribs=True)
+ class A:
+ Alias2 = List[str]
+ x: Alias
+ y: Alias2 = attr.ib()
+ reveal_type(A) # N: Revealed type is "def (x: builtins.list[builtins.int], y: builtins.list[builtins.str]) -> main.A"
+
+- case: testAttrsGeneric
+ main: |
+ from typing import TypeVar, Generic, List
+ import attr
+ T = TypeVar('T')
+ @attr.s(auto_attribs=True)
+ class A(Generic[T]):
+ x: List[T]
+ y: T = attr.ib()
+ def foo(self) -> List[T]:
+ return [self.y]
+ def bar(self) -> T:
+ return self.x[0]
+ def problem(self) -> T:
+ return self.x # E: Incompatible return value type (got "List[T]", expected "T")
+ reveal_type(A) # N: Revealed type is "def [T] (x: builtins.list[T`1], y: T`1) -> main.A[T`1]"
+ a = A([1], 2)
+ reveal_type(a) # N: Revealed type is "main.A[builtins.int*]"
+ reveal_type(a.x) # N: Revealed type is "builtins.list[builtins.int*]"
+ reveal_type(a.y) # N: Revealed type is "builtins.int*"
+
+ A(['str'], 7) # E: Cannot infer type argument 1 of "A"
+ A([1], '2') # E: Cannot infer type argument 1 of "A"
+
+
+- case: testAttrsUntypedGenericInheritance
+ main: |
+ from typing import Generic, TypeVar
+ import attr
+
+ T = TypeVar("T")
+
+ @attr.s(auto_attribs=True)
+ class Base(Generic[T]):
+ attr: T
+
+ @attr.s(auto_attribs=True)
+ class Sub(Base):
+ pass
+
+ sub = Sub(attr=1)
+ reveal_type(sub) # N: Revealed type is "main.Sub"
+ reveal_type(sub.attr) # N: Revealed type is "Any"
+ skip: True # Need to investigate why this is broken
+
+- case: testAttrsGenericInheritance
+ main: |
+ from typing import Generic, TypeVar
+ import attr
+
+ S = TypeVar("S")
+ T = TypeVar("T")
+
+ @attr.s(auto_attribs=True)
+ class Base(Generic[T]):
+ attr: T
+
+ @attr.s(auto_attribs=True)
+ class Sub(Base[S]):
+ pass
+
+ sub_int = Sub[int](attr=1)
+ reveal_type(sub_int) # N: Revealed type is "main.Sub[builtins.int*]"
+ reveal_type(sub_int.attr) # N: Revealed type is "builtins.int*"
+
+ sub_str = Sub[str](attr='ok')
+ reveal_type(sub_str) # N: Revealed type is "main.Sub[builtins.str*]"
+ reveal_type(sub_str.attr) # N: Revealed type is "builtins.str*"
+
+- case: testAttrsGenericInheritance2
+ main: |
+ from typing import Generic, TypeVar
+ import attr
+
+ T1 = TypeVar("T1")
+ T2 = TypeVar("T2")
+ T3 = TypeVar("T3")
+
+ @attr.s(auto_attribs=True)
+ class Base(Generic[T1, T2, T3]):
+ one: T1
+ two: T2
+ three: T3
+
+ @attr.s(auto_attribs=True)
+ class Sub(Base[int, str, float]):
+ pass
+
+ sub = Sub(one=1, two='ok', three=3.14)
+ reveal_type(sub) # N: Revealed type is "main.Sub"
+ reveal_type(sub.one) # N: Revealed type is "builtins.int*"
+ reveal_type(sub.two) # N: Revealed type is "builtins.str*"
+ reveal_type(sub.three) # N: Revealed type is "builtins.float*"
+ skip: True # Need to investigate why this is broken
+
+- case: testAttrsMultiGenericInheritance
+ main: |
+ from typing import Generic, TypeVar
+ import attr
+
+ T = TypeVar("T")
+
+ @attr.s(auto_attribs=True, eq=False)
+ class Base(Generic[T]):
+ base_attr: T
+
+ S = TypeVar("S")
+
+ @attr.s(auto_attribs=True, eq=False)
+ class Middle(Base[int], Generic[S]):
+ middle_attr: S
+
+ @attr.s(auto_attribs=True, eq=False)
+ class Sub(Middle[str]):
+ pass
+
+ reveal_type(Sub.__init__)
+
+ sub = Sub(base_attr=1, middle_attr='ok')
+ reveal_type(sub) # N: Revealed type is "main.Sub"
+ reveal_type(sub.base_attr) # N: Revealed type is "builtins.int*"
+ reveal_type(sub.middle_attr) # N: Revealed type is "builtins.str*"
+ skip: True # Need to investigate why this is broken
+
+- case: testAttrsGenericClassmethod
+ main: |
+ from typing import TypeVar, Generic, Optional
+ import attr
+ T = TypeVar('T')
+ @attr.s(auto_attribs=True)
+ class A(Generic[T]):
+ x: Optional[T]
+ @classmethod
+ def clsmeth(cls) -> None:
+ reveal_type(cls) # N: Revealed type is "Type[main.A[T`1]]"
+
+- case: testAttrsForwardReference
+ main: |
+ from typing import Optional
+ import attr
+ @attr.s(auto_attribs=True)
+ class A:
+ parent: 'B'
+
+ @attr.s(auto_attribs=True)
+ class B:
+ parent: Optional[A]
+
+ reveal_type(A) # N: Revealed type is "def (parent: main.B) -> main.A"
+ reveal_type(B) # N: Revealed type is "def (parent: Union[main.A, None]) -> main.B"
+ A(B(None))
+
+- case: testAttrsForwardReferenceInClass
+ main: |
+ from typing import Optional
+ import attr
+ @attr.s(auto_attribs=True)
+ class A:
+ parent: A.B
+
+ @attr.s(auto_attribs=True)
+ class B:
+ parent: Optional[A]
+
+ reveal_type(A) # N: Revealed type is "def (parent: main.A.B) -> main.A"
+ reveal_type(A.B) # N: Revealed type is "def (parent: Union[main.A, None]) -> main.A.B"
+ A(A.B(None))
+
+- case: testAttrsImporting
+ main: |
+ from helper import A
+ reveal_type(A) # N: Revealed type is "def (a: builtins.int, b: builtins.str) -> helper.A"
+ files:
+ - path: helper.py
+ content: |
+ import attr
+ @attr.s(auto_attribs=True)
+ class A:
+ a: int
+ b: str = attr.ib()
+
+- case: testAttrsOtherMethods
+ main: |
+ import attr
+ @attr.s(auto_attribs=True)
+ class A:
+ a: int
+ b: str = attr.ib()
+ @classmethod
+ def new(cls) -> A:
+ reveal_type(cls) # N: Revealed type is "Type[main.A]"
+ return cls(6, 'hello')
+ @classmethod
+ def bad(cls) -> A:
+ return cls(17) # E: Missing positional argument "b" in call to "A"
+ def foo(self) -> int:
+ return self.a
+ reveal_type(A) # N: Revealed type is "def (a: builtins.int, b: builtins.str) -> main.A"
+ a = A.new()
+ reveal_type(a.foo) # N: Revealed type is "def () -> builtins.int"
+
+- case: testAttrsOtherOverloads
+ main: |
+ import attr
+ from typing import overload, Union
+
+ @attr.s
+ class A:
+ a = attr.ib()
+ b = attr.ib(default=3)
+
+ @classmethod
+ def other(cls) -> str:
+ return "..."
+
+ @overload
+ @classmethod
+ def foo(cls, x: int) -> int: ...
+
+ @overload
+ @classmethod
+ def foo(cls, x: str) -> str: ...
+
+ @classmethod
+ def foo(cls, x: Union[int, str]) -> Union[int, str]:
+ reveal_type(cls) # N: Revealed type is "Type[main.A]"
+ reveal_type(cls.other()) # N: Revealed type is "builtins.str"
+ return x
+
+ reveal_type(A.foo(3)) # N: Revealed type is "builtins.int"
+ reveal_type(A.foo("foo")) # N: Revealed type is "builtins.str"
+
+- case: testAttrsDefaultDecorator
+ main: |
+ import attr
+ @attr.s
+ class C(object):
+ x: int = attr.ib(default=1)
+ y: int = attr.ib()
+ @y.default
+ def name_does_not_matter(self):
+ return self.x + 1
+ C()
+
+- case: testAttrsValidatorDecorator
+ main: |
+ import attr
+ @attr.s
+ class C(object):
+ x = attr.ib()
+ @x.validator
+ def check(self, attribute, value):
+ if value > 42:
+ raise ValueError("x must be smaller or equal to 42")
+ C(42)
+ C(43)
+
+- case: testAttrsLocalVariablesInClassMethod
+ main: |
+ import attr
+ @attr.s(auto_attribs=True)
+ class A:
+ a: int
+ b: int = attr.ib()
+ @classmethod
+ def new(cls, foo: int) -> A:
+ a = foo
+ b = a
+ return cls(a, b)
+
+- case: testAttrsUnionForward
+ main: |
+ import attr
+ from typing import Union, List
+
+ @attr.s(auto_attribs=True)
+ class A:
+ frob: List['AOrB']
+
+ class B:
+ pass
+
+ AOrB = Union[A, B]
+
+ reveal_type(A) # N: Revealed type is "def (frob: builtins.list[Union[main.A, main.B]]) -> main.A"
+ reveal_type(B) # N: Revealed type is "def () -> main.B"
+
+ A([B()])
+
+- case: testAttrsUsingConverter
+ main: |
+ import attr
+ import helper
+
+ def converter2(s:int) -> str:
+ return 'hello'
+
+ @attr.s
+ class C:
+ x: str = attr.ib(converter=helper.converter)
+ y: str = attr.ib(converter=converter2)
+
+ # Because of the converter the __init__ takes an int, but the variable is a str.
+ reveal_type(C) # N: Revealed type is "def (x: builtins.int, y: builtins.int) -> main.C"
+ reveal_type(C(15, 16).x) # N: Revealed type is "builtins.str"
+ files:
+ - path: helper.py
+ content: |
+ def converter(s:int) -> str:
+ return 'hello'
+
+- case: testAttrsUsingBadConverter
+ mypy_config:
+ strict_optional = False
+ main: |
+ import attr
+ from typing import overload
+ @overload
+ def bad_overloaded_converter(x: int, y: int) -> int:
+ ...
+ @overload
+ def bad_overloaded_converter(x: str, y: str) -> str:
+ ...
+ def bad_overloaded_converter(x, y=7):
+ return x
+ def bad_converter() -> str:
+ return ''
+ @attr.dataclass
+ class A:
+ bad: str = attr.ib(converter=bad_converter)
+ bad_overloaded: int = attr.ib(converter=bad_overloaded_converter)
+ reveal_type(A)
+ out: |
+ main:15: error: Cannot determine __init__ type from converter
+ main:15: error: Argument "converter" has incompatible type "Callable[[], str]"; expected "Callable[[Any], Any]"
+ main:16: error: Cannot determine __init__ type from converter
+ main:16: error: Argument "converter" has incompatible type overloaded function; expected "Callable[[Any], Any]"
+ main:17: note: Revealed type is "def (bad: Any, bad_overloaded: Any) -> main.A"
+
+- case: testAttrsUsingBadConverterReprocess
+ mypy_config:
+ strict_optional = False
+ main: |
+ import attr
+ from typing import overload
+ forward: 'A'
+ @overload
+ def bad_overloaded_converter(x: int, y: int) -> int:
+ ...
+ @overload
+ def bad_overloaded_converter(x: str, y: str) -> str:
+ ...
+ def bad_overloaded_converter(x, y=7):
+ return x
+ def bad_converter() -> str:
+ return ''
+ @attr.dataclass
+ class A:
+ bad: str = attr.ib(converter=bad_converter)
+ bad_overloaded: int = attr.ib(converter=bad_overloaded_converter)
+ reveal_type(A)
+ out: |
+ main:16: error: Cannot determine __init__ type from converter
+ main:16: error: Argument "converter" has incompatible type "Callable[[], str]"; expected "Callable[[Any], Any]"
+ main:17: error: Cannot determine __init__ type from converter
+ main:17: error: Argument "converter" has incompatible type overloaded function; expected "Callable[[Any], Any]"
+ main:18: note: Revealed type is "def (bad: Any, bad_overloaded: Any) -> main.A"
+
+- case: testAttrsUsingUnsupportedConverter
+ main: |
+ import attr
+ class Thing:
+ def do_it(self, int) -> str:
+ ...
+ thing = Thing()
+ def factory(default: int):
+ ...
+ @attr.s
+ class C:
+ x: str = attr.ib(converter=thing.do_it) # E: Unsupported converter, only named functions and types are currently supported
+ y: str = attr.ib(converter=lambda x: x) # E: Unsupported converter, only named functions and types are currently supported
+ z: str = attr.ib(converter=factory(8)) # E: Unsupported converter, only named functions and types are currently supported
+ reveal_type(C) # N: Revealed type is "def (x: Any, y: Any, z: Any) -> main.C"
+
+- case: testAttrsUsingConverterAndSubclass
+ main: |
+ import attr
+
+ def converter(s:int) -> str:
+ return 'hello'
+
+ @attr.s
+ class C:
+ x: str = attr.ib(converter=converter)
+
+ @attr.s
+ class A(C):
+ pass
+
+ # Because of the converter the __init__ takes an int, but the variable is a str.
+ reveal_type(A) # N: Revealed type is "def (x: builtins.int) -> main.A"
+ reveal_type(A(15).x) # N: Revealed type is "builtins.str"
+
+- case: testAttrsUsingConverterWithTypes
+ main: |
+ from typing import overload
+ import attr
+
+ @attr.dataclass
+ class A:
+ x: str
+
+ @attr.s
+ class C:
+ x: complex = attr.ib(converter=complex)
+ y: int = attr.ib(converter=int)
+ z: A = attr.ib(converter=A)
+
+ o = C("1", "2", "3")
+ o = C(1, 2, "3")
+
+- case: testAttrsCmpWithSubclasses
+ main: |
+ import attr
+ @attr.s
+ class A: pass
+ @attr.s
+ class B: pass
+ @attr.s
+ class C(A, B): pass
+ @attr.s
+ class D(A): pass
+
+ reveal_type(A.__lt__) # N: Revealed type is "def [_AT] (self: _AT`-1, other: _AT`-1) -> builtins.bool"
+ reveal_type(B.__lt__) # N: Revealed type is "def [_AT] (self: _AT`-1, other: _AT`-1) -> builtins.bool"
+ reveal_type(C.__lt__) # N: Revealed type is "def [_AT] (self: _AT`-1, other: _AT`-1) -> builtins.bool"
+ reveal_type(D.__lt__) # N: Revealed type is "def [_AT] (self: _AT`-1, other: _AT`-1) -> builtins.bool"
+
+ A() < A()
+ B() < B()
+ A() < B() # E: Unsupported operand types for < ("A" and "B")
+
+ C() > A()
+ C() > B()
+ C() > C()
+ C() > D() # E: Unsupported operand types for > ("C" and "D")
+
+ D() >= A()
+ D() >= B() # E: Unsupported operand types for >= ("D" and "B")
+ D() >= C() # E: Unsupported operand types for >= ("D" and "C")
+ D() >= D()
+
+ A() <= 1 # E: Unsupported operand types for <= ("A" and "int")
+ B() <= 1 # E: Unsupported operand types for <= ("B" and "int")
+ C() <= 1 # E: Unsupported operand types for <= ("C" and "int")
+ D() <= 1 # E: Unsupported operand types for <= ("D" and "int")
+
+- case: testAttrsComplexSuperclass
+ main: |
+ import attr
+ @attr.s
+ class C:
+ x: int = attr.ib(default=1)
+ y: int = attr.ib()
+ @y.default
+ def name_does_not_matter(self):
+ return self.x + 1
+ @attr.s
+ class A(C):
+ z: int = attr.ib(default=18)
+ reveal_type(C) # N: Revealed type is "def (x: builtins.int =, y: builtins.int =) -> main.C"
+ reveal_type(A) # N: Revealed type is "def (x: builtins.int =, y: builtins.int =, z: builtins.int =) -> main.A"
+
+- case: testAttrsMultiAssign
+ main: |
+ import attr
+ @attr.s
+ class A:
+ x, y, z = attr.ib(), attr.ib(type=int), attr.ib(default=17)
+ reveal_type(A) # N: Revealed type is "def (x: Any, y: builtins.int, z: Any =) -> main.A"
+
+- case: testAttrsMultiAssign2
+ main: |
+ import attr
+ @attr.s
+ class A:
+ x = y = z = attr.ib() # E: Too many names for one attribute
+
+- case: testAttrsPrivateInit
+ main: |
+ import attr
+ @attr.s
+ class C(object):
+ _x = attr.ib(init=False, default=42)
+ C()
+ C(_x=42) # E: Unexpected keyword argument "_x" for "C"
+
+- case: testAttrsAutoMustBeAll
+ main: |
+ import attr
+ @attr.s(auto_attribs=True)
+ class A:
+ a: int
+ b = 17
+ # The following forms are not allowed with auto_attribs=True
+ c = attr.ib() # E: Need type annotation for "c"
+ d, e = attr.ib(), attr.ib() # E: Need type annotation for "d" # E: Need type annotation for "e"
+ f = g = attr.ib() # E: Need type annotation for "f" # E: Need type annotation for "g"
+
+- case: testAttrsRepeatedName
+ main: |
+ import attr
+ @attr.s
+ class A:
+ a = attr.ib(default=8)
+ b = attr.ib()
+ a = attr.ib()
+ reveal_type(A) # N: Revealed type is "def (b: Any, a: Any) -> main.A"
+ @attr.s
+ class B:
+ a: int = attr.ib(default=8)
+ b: int = attr.ib()
+ a: int = attr.ib() # E: Name "a" already defined on line 10
+ reveal_type(B) # N: Revealed type is "def (b: builtins.int, a: builtins.int) -> main.B"
+ @attr.s(auto_attribs=True)
+ class C:
+ a: int = 8
+ b: int
+ a: int = attr.ib() # E: Name "a" already defined on line 16
+ reveal_type(C) # N: Revealed type is "def (a: builtins.int, b: builtins.int) -> main.C"
+
+- case: testAttrsNewStyleClassPy2
+ mypy_config:
+ python_version = 2.7
+ main: |
+ import attr
+ @attr.s
+ class Good(object):
+ pass
+ @attr.s
+ class Bad: # E: attrs only works with new-style classes
+ pass
+ skip: True # https://github.com/typeddjango/pytest-mypy-plugins/issues/47
+
+- case: testAttrsAutoAttribsPy2
+ mypy_config: |
+ python_version = 2.7
+ main: |
+ import attr
+ @attr.s(auto_attribs=True) # E: auto_attribs is not supported in Python 2
+ class A(object):
+ x = attr.ib()
+ skip: True # https://github.com/typeddjango/pytest-mypy-plugins/issues/47
+
+- case: testAttrsFrozenSubclass
+ main: |
+ import attr
+
+ @attr.dataclass
+ class NonFrozenBase:
+ a: int
+
+ @attr.dataclass(frozen=True)
+ class FrozenBase:
+ a: int
+
+ @attr.dataclass(frozen=True)
+ class FrozenNonFrozen(NonFrozenBase):
+ b: int
+
+ @attr.dataclass(frozen=True)
+ class FrozenFrozen(FrozenBase):
+ b: int
+
+ @attr.dataclass
+ class NonFrozenFrozen(FrozenBase):
+ b: int
+
+ # Make sure these are untouched
+ non_frozen_base = NonFrozenBase(1)
+ non_frozen_base.a = 17
+ frozen_base = FrozenBase(1)
+ frozen_base.a = 17 # E: Property "a" defined in "FrozenBase" is read-only
+
+ a = FrozenNonFrozen(1, 2)
+ a.a = 17 # E: Property "a" defined in "FrozenNonFrozen" is read-only
+ a.b = 17 # E: Property "b" defined in "FrozenNonFrozen" is read-only
+
+ b = FrozenFrozen(1, 2)
+ b.a = 17 # E: Property "a" defined in "FrozenFrozen" is read-only
+ b.b = 17 # E: Property "b" defined in "FrozenFrozen" is read-only
+
+ c = NonFrozenFrozen(1, 2)
+ c.a = 17 # E: Property "a" defined in "NonFrozenFrozen" is read-only
+ c.b = 17 # E: Property "b" defined in "NonFrozenFrozen" is read-only
+- case: testAttrsCallableAttributes
+ main: |
+ from typing import Callable
+ import attr
+ def blah(a: int, b: int) -> bool:
+ return True
+
+ @attr.s(auto_attribs=True)
+ class F:
+ _cb: Callable[[int, int], bool] = blah
+ def foo(self) -> bool:
+ return self._cb(5, 6)
+
+ @attr.s
+ class G:
+ _cb: Callable[[int, int], bool] = attr.ib(blah)
+ def foo(self) -> bool:
+ return self._cb(5, 6)
+
+ @attr.s(auto_attribs=True, frozen=True)
+ class FFrozen(F):
+ def bar(self) -> bool:
+ return self._cb(5, 6)
+
+- case: testAttrsWithFactory
+ main: |
+ from typing import List
+ import attr
+ def my_factory() -> int:
+ return 7
+ @attr.s
+ class A:
+ x: List[int] = attr.ib(factory=list)
+ y: int = attr.ib(factory=my_factory)
+ A()
+
+- case: testAttrsFactoryAndDefault
+ main: |
+ import attr
+ @attr.s
+ class A:
+ x: int = attr.ib(factory=int, default=7) # E: Can't pass both "default" and "factory".
+
+- case: testAttrsFactoryBadReturn
+ main: |
+ import attr
+ def my_factory() -> int:
+ return 7
+ @attr.s
+ class A:
+ x: int = attr.ib(factory=list) # E: Incompatible types in assignment (expression has type "List[_T]", variable has type "int")
+ y: str = attr.ib(factory=my_factory) # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+
+- case: testAttrsDefaultAndInit
+ main: |
+ import attr
+
+ @attr.s
+ class C:
+ a = attr.ib(init=False, default=42)
+ b = attr.ib() # Ok because previous attribute is init=False
+ c = attr.ib(default=44)
+ d = attr.ib(init=False) # Ok because this attribute is init=False
+ e = attr.ib() # E: Non-default attributes not allowed after default attributes.
+
+- case: testAttrsOptionalConverter
+ main: |
+ # flags: --strict-optional
+ import attr
+ from attr.converters import optional
+ from typing import Optional
+
+ def converter(s:int) -> str:
+ return 'hello'
+
+
+ @attr.s
+ class A:
+ y: Optional[int] = attr.ib(converter=optional(int))
+ z: Optional[str] = attr.ib(converter=optional(converter))
+
+
+ A(None, None)
+
+- case: testAttrsTypeVarNoCollision
+ main: |
+ from typing import TypeVar, Generic
+ import attr
+
+ T = TypeVar("T", bytes, str)
+
+ # Make sure the generated __le__ (and friends) don't use T for their arguments.
+ @attr.s(auto_attribs=True)
+ class A(Generic[T]):
+ v: T
+
+- case: testAttrsKwOnlyAttrib
+ main: |
+ import attr
+ @attr.s
+ class A:
+ a = attr.ib(kw_only=True)
+ A() # E: Missing named argument "a" for "A"
+ A(15) # E: Too many positional arguments for "A"
+ A(a=15)
+
+- case: testAttrsKwOnlyClass
+ main: |
+ import attr
+ @attr.s(kw_only=True, auto_attribs=True)
+ class A:
+ a: int
+ b: bool
+ A() # E: Missing named argument "a" for "A" # E: Missing named argument "b" for "A"
+ A(b=True, a=15)
+
+- case: testAttrsKwOnlyClassNoInit
+ main: |
+ import attr
+ @attr.s(kw_only=True)
+ class B:
+ a = attr.ib(init=False)
+ b = attr.ib()
+ B(b=True)
+
+- case: testAttrsKwOnlyWithDefault
+ main: |
+ import attr
+ @attr.s
+ class C:
+ a = attr.ib(0)
+ b = attr.ib(kw_only=True)
+ c = attr.ib(16, kw_only=True)
+ C(b=17)
+
+- case: testAttrsKwOnlyClassWithMixedDefaults
+ main: |
+ import attr
+ @attr.s(kw_only=True)
+ class D:
+ a = attr.ib(10)
+ b = attr.ib()
+ c = attr.ib(15)
+ D(b=17)
+
+
+- case: testAttrsKwOnlySubclass
+ main: |
+ import attr
+ @attr.s
+ class A2:
+ a = attr.ib(default=0)
+ @attr.s
+ class B2(A2):
+ b = attr.ib(kw_only=True)
+ B2(b=1)
+
+- case: testAttrsNonKwOnlyAfterKwOnly
+ main: |
+ import attr
+ @attr.s(kw_only=True)
+ class A:
+ a = attr.ib(default=0)
+ @attr.s
+ class B(A):
+ b = attr.ib()
+ @attr.s
+ class C:
+ a = attr.ib(kw_only=True)
+ b = attr.ib(15)
+
+- case: testAttrsKwOnlyPy2
+ mypy_config:
+ python_version=2.7
+ main: |
+ import attr
+ @attr.s(kw_only=True) # E: kw_only is not supported in Python 2
+ class A(object):
+ x = attr.ib()
+ @attr.s
+ class B(object):
+ x = attr.ib(kw_only=True) # E: kw_only is not supported in Python 2
+ skip: True # https://github.com/typeddjango/pytest-mypy-plugins/issues/47
+
+- case: testAttrsDisallowUntypedWorksForward
+ main: |
+ # flags: --disallow-untyped-defs
+ import attr
+ from typing import List
+
+ @attr.s
+ class B:
+ x: C = attr.ib()
+
+ class C(List[C]):
+ pass
+
+ reveal_type(B) # N: Revealed type is "def (x: main.C) -> main.B"
+
+- case: testDisallowUntypedWorksForwardBad
+ mypy_config:
+ disallow_untyped_defs = True
+ main: |
+ import attr
+
+ @attr.s
+ class B:
+ x = attr.ib() # E: Need type annotation for "x"
+
+ reveal_type(B) # N: Revealed type is "def (x: Any) -> main.B"
+
+- case: testAttrsDefaultDecoratorDeferred
+ main: |
+ defer: Yes
+
+ import attr
+ @attr.s
+ class C(object):
+ x: int = attr.ib(default=1)
+ y: int = attr.ib()
+ @y.default
+ def inc(self):
+ return self.x + 1
+
+ class Yes: ...
+
+- case: testAttrsValidatorDecoratorDeferred
+ main: |
+ defer: Yes
+
+ import attr
+ @attr.s
+ class C(object):
+ x = attr.ib()
+ @x.validator
+ def check(self, attribute, value):
+ if value > 42:
+ raise ValueError("x must be smaller or equal to 42")
+ C(42)
+ C(43)
+
+ class Yes: ...
+
+- case: testTypeInAttrUndefined
+ main: |
+ import attr
+
+ @attr.s
+ class C:
+ total = attr.ib(type=Bad) # E: Name "Bad" is not defined
+
+- case: testTypeInAttrForwardInRuntime
+ main: |
+ import attr
+
+ @attr.s
+ class C:
+ total = attr.ib(type=Forward)
+
+ reveal_type(C.total) # N: Revealed type is "main.Forward"
+ C('no') # E: Argument 1 to "C" has incompatible type "str"; expected "Forward"
+ class Forward: ...
+
+- case: testDefaultInAttrForward
+ main: |
+ import attr
+
+ @attr.s
+ class C:
+ total = attr.ib(default=func())
+
+ def func() -> int: ...
+
+ C()
+ C(1)
+ C(1, 2) # E: Too many arguments for "C"
+
+- case: testTypeInAttrUndefinedFrozen
+ main: |
+ import attr
+
+ @attr.s(frozen=True)
+ class C:
+ total = attr.ib(type=Bad) # E: Name "Bad" is not defined
+
+ C(0).total = 1 # E: Property "total" defined in "C" is read-only
+
+- case: testTypeInAttrDeferredStar
+ main: |
+ import lib
+ files:
+ - path: lib.py
+ content: |
+ import attr
+ MYPY = False
+ if MYPY: # Force deferral
+ from other import *
+
+ @attr.s
+ class C:
+ total = attr.ib(type=int)
+
+ C() # E: Missing positional argument "total" in call to "C"
+ C('no') # E: Argument 1 to "C" has incompatible type "str"; expected "int"
+ - path: other.py
+ content: |
+ import lib
+
+- case: testAttrsDefaultsMroOtherFile
+ main: |
+ import a
+ files:
+ - path: a.py
+ content: |
+ import attr
+ from b import A1, A2
+
+ @attr.s
+ class Asdf(A1, A2): # E: Non-default attributes not allowed after default attributes.
+ pass
+ - path: b.py
+ content: |
+ import attr
+
+ @attr.s
+ class A1:
+ a: str = attr.ib('test')
+
+ @attr.s
+ class A2:
+ b: int = attr.ib()
+
+- case: testAttrsInheritanceNoAnnotation
+ main: |
+ import attr
+
+ @attr.s
+ class A:
+ foo = attr.ib() # type: int
+
+ x = 0
+ @attr.s
+ class B(A):
+ foo = x
+
+ reveal_type(B) # N: Revealed type is "def (foo: builtins.int) -> main.B"
diff --git a/testing/web-platform/tests/tools/third_party/attrs/tests/test_next_gen.py b/testing/web-platform/tests/tools/third_party/attrs/tests/test_next_gen.py
new file mode 100644
index 0000000000..8395f9c028
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/tests/test_next_gen.py
@@ -0,0 +1,440 @@
+# SPDX-License-Identifier: MIT
+
+"""
+Python 3-only integration tests for provisional next generation APIs.
+"""
+
+import re
+
+from functools import partial
+
+import pytest
+
+import attr as _attr # don't use it by accident
+import attrs
+
+
+@attrs.define
+class C:
+ x: str
+ y: int
+
+
+class TestNextGen:
+ def test_simple(self):
+ """
+ Instantiation works.
+ """
+ C("1", 2)
+
+ def test_no_slots(self):
+ """
+ slots can be deactivated.
+ """
+
+ @attrs.define(slots=False)
+ class NoSlots:
+ x: int
+
+ ns = NoSlots(1)
+
+ assert {"x": 1} == getattr(ns, "__dict__")
+
+ def test_validates(self):
+ """
+ Validators at __init__ and __setattr__ work.
+ """
+
+ @attrs.define
+ class Validated:
+ x: int = attrs.field(validator=attrs.validators.instance_of(int))
+
+ v = Validated(1)
+
+ with pytest.raises(TypeError):
+ Validated(None)
+
+ with pytest.raises(TypeError):
+ v.x = "1"
+
+ def test_no_order(self):
+ """
+ Order is off by default but can be added.
+ """
+ with pytest.raises(TypeError):
+ C("1", 2) < C("2", 3)
+
+ @attrs.define(order=True)
+ class Ordered:
+ x: int
+
+ assert Ordered(1) < Ordered(2)
+
+ def test_override_auto_attribs_true(self):
+ """
+ Don't guess if auto_attribs is set explicitly.
+
+ Having an unannotated attrs.ib/attrs.field fails.
+ """
+ with pytest.raises(attrs.exceptions.UnannotatedAttributeError):
+
+ @attrs.define(auto_attribs=True)
+ class ThisFails:
+ x = attrs.field()
+ y: int
+
+ def test_override_auto_attribs_false(self):
+ """
+ Don't guess if auto_attribs is set explicitly.
+
+ Annotated fields that don't carry an attrs.ib are ignored.
+ """
+
+ @attrs.define(auto_attribs=False)
+ class NoFields:
+ x: int
+ y: int
+
+ assert NoFields() == NoFields()
+
+ def test_auto_attribs_detect(self):
+ """
+ define correctly detects if a class lacks type annotations.
+ """
+
+ @attrs.define
+ class OldSchool:
+ x = attrs.field()
+
+ assert OldSchool(1) == OldSchool(1)
+
+ # Test with maybe_cls = None
+ @attrs.define()
+ class OldSchool2:
+ x = attrs.field()
+
+ assert OldSchool2(1) == OldSchool2(1)
+
+ def test_auto_attribs_detect_fields_and_annotations(self):
+ """
+ define infers auto_attribs=True if fields have type annotations
+ """
+
+ @attrs.define
+ class NewSchool:
+ x: int
+ y: list = attrs.field()
+
+ @y.validator
+ def _validate_y(self, attribute, value):
+ if value < 0:
+ raise ValueError("y must be positive")
+
+ assert NewSchool(1, 1) == NewSchool(1, 1)
+ with pytest.raises(ValueError):
+ NewSchool(1, -1)
+ assert list(attrs.fields_dict(NewSchool).keys()) == ["x", "y"]
+
+ def test_auto_attribs_partially_annotated(self):
+ """
+ define infers auto_attribs=True if any type annotations are found
+ """
+
+ @attrs.define
+ class NewSchool:
+ x: int
+ y: list
+ z = 10
+
+ # fields are defined for any annotated attributes
+ assert NewSchool(1, []) == NewSchool(1, [])
+ assert list(attrs.fields_dict(NewSchool).keys()) == ["x", "y"]
+
+ # while the unannotated attributes are left as class vars
+ assert NewSchool.z == 10
+ assert "z" in NewSchool.__dict__
+
+ def test_auto_attribs_detect_annotations(self):
+ """
+ define correctly detects if a class has type annotations.
+ """
+
+ @attrs.define
+ class NewSchool:
+ x: int
+
+ assert NewSchool(1) == NewSchool(1)
+
+ # Test with maybe_cls = None
+ @attrs.define()
+ class NewSchool2:
+ x: int
+
+ assert NewSchool2(1) == NewSchool2(1)
+
+ def test_exception(self):
+ """
+ Exceptions are detected and correctly handled.
+ """
+
+ @attrs.define
+ class E(Exception):
+ msg: str
+ other: int
+
+ with pytest.raises(E) as ei:
+ raise E("yolo", 42)
+
+ e = ei.value
+
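+ # define() enables auto_exc for Exception subclasses, so the field
+ # values end up both as attributes and in e.args.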
+ assert ("yolo", 42) == e.args
+ assert "yolo" == e.msg
+ assert 42 == e.other
+
+ def test_frozen(self):
+ """
+ attrs.frozen freezes classes.
+ """
+
+ @attrs.frozen
+ class F:
+ x: str
+
+ f = F(1)
+
+ with pytest.raises(attrs.exceptions.FrozenInstanceError):
+ f.x = 2
+
+ def test_auto_detect_eq(self):
+ """
+ auto_detect=True works for eq.
+
+ Regression test for #670.
+ """
+
+ @attrs.define
+ class C:
+ def __eq__(self, o):
+ raise ValueError()
+
+ with pytest.raises(ValueError):
+ C() == C()
+
+ def test_subclass_frozen(self):
+ """
+ It's possible to subclass an `attrs.frozen` class and the frozen-ness
+ is inherited.
+ """
+
+ @attrs.frozen
+ class A:
+ a: int
+
+ @attrs.frozen
+ class B(A):
+ b: int
+
+ @attrs.define(on_setattr=attrs.setters.NO_OP)
+ class C(B):
+ c: int
+
+ assert B(1, 2) == B(1, 2)
+ assert C(1, 2, 3) == C(1, 2, 3)
+
+ with pytest.raises(attrs.exceptions.FrozenInstanceError):
+ A(1).a = 1
+
+ with pytest.raises(attrs.exceptions.FrozenInstanceError):
+ B(1, 2).a = 1
+
+ with pytest.raises(attrs.exceptions.FrozenInstanceError):
+ B(1, 2).b = 2
+
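+ # Even though C opts out via on_setattr=NO_OP, the frozen-ness
+ # inherited from B still blocks attribute assignment.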
+ with pytest.raises(attrs.exceptions.FrozenInstanceError):
+ C(1, 2, 3).c = 3
+
+ def test_catches_frozen_on_setattr(self):
+ """
+ Passing frozen=True and on_setattr hooks is caught, even if the
+ immutability is inherited.
+ """
+
+ @attrs.define(frozen=True)
+ class A:
+ pass
+
+ with pytest.raises(
+ ValueError, match="Frozen classes can't use on_setattr."
+ ):
+
+ @attrs.define(frozen=True, on_setattr=attrs.setters.validate)
+ class B:
+ pass
+
+ with pytest.raises(
+ ValueError,
+ match=re.escape(
+ "Frozen classes can't use on_setattr "
+ "(frozen-ness was inherited)."
+ ),
+ ):
+
+ @attrs.define(on_setattr=attrs.setters.validate)
+ class C(A):
+ pass
+
+ @pytest.mark.parametrize(
+ "decorator",
+ [
+ partial(_attr.s, frozen=True, slots=True, auto_exc=True),
+ attrs.frozen,
+ attrs.define,
+ attrs.mutable,
+ ],
+ )
+ def test_discard_context(self, decorator):
+ """
+ raise from None works.
+
+ Regression test for #703.
+ """
+
+ @decorator
+ class MyException(Exception):
+ x: str = attrs.field()
+
+ with pytest.raises(MyException) as ei:
+ try:
+ raise ValueError()
+ except ValueError:
+ raise MyException("foo") from None
+
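+ # "from None" suppresses the implicit exception context, so __cause__
+ # must stay None even for slotted attrs exception classes (#703).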
+ assert "foo" == ei.value.x
+ assert ei.value.__cause__ is None
+
+ def test_converts_and_validates_by_default(self):
+ """
+ If no on_setattr is set, assume setters.convert, setters.validate.
+ """
+
+ @attrs.define
+ class C:
+ x: int = attrs.field(converter=int)
+
+ @x.validator
+ def _v(self, _, value):
+ if value < 10:
+ raise ValueError("must be >=10")
+
+ inst = C(10)
+
+ # Converts
+ inst.x = "11"
+
+ assert 11 == inst.x
+
+ # Validates
+ with pytest.raises(ValueError, match="must be >=10"):
+ inst.x = "9"
+
+ def test_mro_ng(self):
+ """
+ Attributes and methods are looked up the same way in NG by default.
+
+ See #428
+ """
+
+ @attrs.define
+ class A:
+
+ x: int = 10
+
+ def xx(self):
+ return 10
+
+ @attrs.define
+ class B(A):
+ y: int = 20
+
+ @attrs.define
+ class C(A):
+ x: int = 50
+
+ def xx(self):
+ return 50
+
+ @attrs.define
+ class D(B, C):
+ pass
+
+ d = D()
+
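+ # The default for x is looked up along D's MRO the same way xx() is,
+ # so both come from C (50) rather than A (10).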
+ assert d.x == d.xx()
+
+
+class TestAsTuple:
+ def test_smoke(self):
+ """
+ `attrs.astuple` only changes defaults, so we just call it and compare.
+ """
+ inst = C("foo", 42)
+
+ assert attrs.astuple(inst) == _attr.astuple(inst)
+
+
+class TestAsDict:
+ def test_smoke(self):
+ """
+ `attrs.asdict` only changes defaults, so we just call it and compare.
+ """
+ inst = C("foo", {(1,): 42})
+
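+ # attrs.asdict keeps collection types by default, so it matches the
+ # legacy call only when retain_collection_types=True is passed.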
+ assert attrs.asdict(inst) == _attr.asdict(
+ inst, retain_collection_types=True
+ )
+
+
+class TestImports:
+ """
+ Verify our re-imports and mirroring works.
+ """
+
+ def test_converters(self):
+ """
+ Importing from attrs.converters works.
+ """
+ from attrs.converters import optional
+
+ assert optional is _attr.converters.optional
+
+ def test_exceptions(self):
+ """
+ Importing from attrs.exceptions works.
+ """
+ from attrs.exceptions import FrozenError
+
+ assert FrozenError is _attr.exceptions.FrozenError
+
+ def test_filters(self):
+ """
+ Importing from attrs.filters works.
+ """
+ from attrs.filters import include
+
+ assert include is _attr.filters.include
+
+ def test_setters(self):
+ """
+ Importing from attrs.setters works.
+ """
+ from attrs.setters import pipe
+
+ assert pipe is _attr.setters.pipe
+
+ def test_validators(self):
+ """
+ Importing from attrs.validators works.
+ """
+ from attrs.validators import and_
+
+ assert and_ is _attr.validators.and_
diff --git a/testing/web-platform/tests/tools/third_party/attrs/tests/test_pattern_matching.py b/testing/web-platform/tests/tools/third_party/attrs/tests/test_pattern_matching.py
new file mode 100644
index 0000000000..590804a8a7
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/tests/test_pattern_matching.py
@@ -0,0 +1,101 @@
+# SPDX-License-Identifier: MIT
+
+# Keep this file SHORT, until Black can handle it.
+import pytest
+
+import attr
+
+
+class TestPatternMatching:
+ """
+ Pattern matching syntax test cases.
+ """
+
+ @pytest.mark.parametrize("dec", [attr.s, attr.define, attr.frozen])
+ def test_simple_match_case(self, dec):
+ """
+ Simple match case statement works as expected with all class
+ decorators.
+ """
+
+ @dec
+ class C(object):
+ a = attr.ib()
+
+ assert ("a",) == C.__match_args__
+
+ matched = False
+ c = C(a=1)
+ match c:
+ case C(a):
+ matched = True
+
+ assert matched
+ assert 1 == a
+
+ def test_explicit_match_args(self):
+ """
+ Does not overwrite a manually set empty __match_args__.
+ """
+
+ ma = ()
+
+ @attr.define
+ class C:
+ a = attr.field()
+ __match_args__ = ma
+
+ c = C(a=1)
+
+ msg = r"C\(\) accepts 0 positional sub-patterns \(1 given\)"
+ with pytest.raises(TypeError, match=msg):
+ match c:
+ case C(_):
+ pass
+
+ def test_match_args_kw_only(self):
+ """
+ kw_only classes don't generate __match_args__.
+ kw_only fields are not included in __match_args__.
+ """
+
+ @attr.define
+ class C:
+ a = attr.field(kw_only=True)
+ b = attr.field()
+
+ assert ("b",) == C.__match_args__
+
+ c = C(a=1, b=1)
+ msg = r"C\(\) accepts 1 positional sub-pattern \(2 given\)"
+ with pytest.raises(TypeError, match=msg):
+ match c:
+ case C(a, b):
+ pass
+
+ found = False
+ match c:
+ case C(b, a=a):
+ found = True
+
+ assert found
+
+ @attr.define(kw_only=True)
+ class C:
+ a = attr.field()
+ b = attr.field()
+
+ c = C(a=1, b=1)
+ msg = r"C\(\) accepts 0 positional sub-patterns \(2 given\)"
+ with pytest.raises(TypeError, match=msg):
+ match c:
+ case C(a, b):
+ pass
+
+ found = False
+ match c:
+ case C(a=a, b=b):
+ found = True
+
+ assert found
+ assert (1, 1) == (a, b)
diff --git a/testing/web-platform/tests/tools/third_party/attrs/tests/test_pyright.py b/testing/web-platform/tests/tools/third_party/attrs/tests/test_pyright.py
new file mode 100644
index 0000000000..c30dcc5cb1
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/tests/test_pyright.py
@@ -0,0 +1,71 @@
+# SPDX-License-Identifier: MIT
+
+import json
+import os.path
+import shutil
+import subprocess
+import sys
+
+import pytest
+
+import attr
+
+
+if sys.version_info < (3, 6):
+ _found_pyright = False
+else:
+ _found_pyright = shutil.which("pyright")
+
+
+@attr.s(frozen=True)
+class PyrightDiagnostic(object):
+ severity = attr.ib()
+ message = attr.ib()
+
+
+@pytest.mark.skipif(not _found_pyright, reason="Requires pyright.")
+def test_pyright_baseline():
+ """The __dataclass_transform__ decorator allows pyright to determine
+ attrs decorated class types.
+ """
+
+ test_file = os.path.dirname(__file__) + "/dataclass_transform_example.py"
+
+ pyright = subprocess.run(
+ ["pyright", "--outputjson", str(test_file)], capture_output=True
+ )
+ pyright_result = json.loads(pyright.stdout)
+
+ diagnostics = set(
+ PyrightDiagnostic(d["severity"], d["message"])
+ for d in pyright_result["generalDiagnostics"]
+ )
+
+ # Expected diagnostics as per pyright 1.1.135
+ expected_diagnostics = {
+ PyrightDiagnostic(
+ severity="information",
+ message='Type of "Define.__init__" is'
+ ' "(self: Define, a: str, b: int) -> None"',
+ ),
+ PyrightDiagnostic(
+ severity="information",
+ message='Type of "DefineConverter.__init__" is '
+ '"(self: DefineConverter, with_converter: int) -> None"',
+ ),
+ PyrightDiagnostic(
+ severity="information",
+ message='Type of "d.a" is "Literal[\'new\']"',
+ ),
+ PyrightDiagnostic(
+ severity="error",
+ message='Cannot assign member "a" for type '
+ '"FrozenDefine"\n\xa0\xa0"FrozenDefine" is frozen',
+ ),
+ PyrightDiagnostic(
+ severity="information",
+ message='Type of "d2.a" is "Literal[\'new\']"',
+ ),
+ }
+
+ assert diagnostics == expected_diagnostics
diff --git a/testing/web-platform/tests/tools/third_party/attrs/tests/test_setattr.py b/testing/web-platform/tests/tools/third_party/attrs/tests/test_setattr.py
new file mode 100644
index 0000000000..aaedde5746
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/tests/test_setattr.py
@@ -0,0 +1,437 @@
+# SPDX-License-Identifier: MIT
+
+from __future__ import absolute_import, division, print_function
+
+import pickle
+
+import pytest
+
+import attr
+
+from attr import setters
+from attr._compat import PY2
+from attr.exceptions import FrozenAttributeError
+from attr.validators import instance_of, matches_re
+
+
+@attr.s(frozen=True)
+class Frozen(object):
+ x = attr.ib()
+
+
+@attr.s
+class WithOnSetAttrHook(object):
+ x = attr.ib(on_setattr=lambda *args: None)
+
+
+class TestSetAttr(object):
+ def test_change(self):
+ """
+ The return value of a hook overwrites the value. But they are not run
+ on __init__.
+ """
+
+ def hook(*a, **kw):
+ return "hooked!"
+
+ @attr.s
+ class Hooked(object):
+ x = attr.ib(on_setattr=hook)
+ y = attr.ib()
+
+ h = Hooked("x", "y")
+
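+ # on_setattr hooks are not run during __init__, so the constructor
+ # arguments are stored verbatim.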
+ assert "x" == h.x
+ assert "y" == h.y
+
+ h.x = "xxx"
+ h.y = "yyy"
+
+ assert "yyy" == h.y
+ assert "hooked!" == h.x
+
+ def test_frozen_attribute(self):
+ """
+ Frozen attributes raise FrozenAttributeError, others are not affected.
+ """
+
+ @attr.s
+ class PartiallyFrozen(object):
+ x = attr.ib(on_setattr=setters.frozen)
+ y = attr.ib()
+
+ pf = PartiallyFrozen("x", "y")
+
+ pf.y = "yyy"
+
+ assert "yyy" == pf.y
+
+ with pytest.raises(FrozenAttributeError):
+ pf.x = "xxx"
+
+ assert "x" == pf.x
+
+ @pytest.mark.parametrize(
+ "on_setattr",
+ [setters.validate, [setters.validate], setters.pipe(setters.validate)],
+ )
+ def test_validator(self, on_setattr):
+ """
+ Validators are run and they don't alter the value.
+ """
+
+ @attr.s(on_setattr=on_setattr)
+ class ValidatedAttribute(object):
+ x = attr.ib()
+ y = attr.ib(validator=[instance_of(str), matches_re("foo.*qux")])
+
+ va = ValidatedAttribute(42, "foobarqux")
+
+ with pytest.raises(TypeError) as ei:
+ va.y = 42
+
+ assert "foobarqux" == va.y
+
+ assert ei.value.args[0].startswith("'y' must be <")
+
+ with pytest.raises(ValueError) as ei:
+ va.y = "quxbarfoo"
+
+ assert ei.value.args[0].startswith("'y' must match regex '")
+
+ assert "foobarqux" == va.y
+
+ va.y = "foobazqux"
+
+ assert "foobazqux" == va.y
+
+ def test_pipe(self):
+ """
+ Multiple hooks are possible; in that case the last return value is
+ used. They can be supplied using the pipe functions or by passing a
+ list to on_setattr.
+ """
+
+ s = [setters.convert, lambda _, __, nv: nv + 1]
+
+ @attr.s
+ class Piped(object):
+ x1 = attr.ib(converter=int, on_setattr=setters.pipe(*s))
+ x2 = attr.ib(converter=int, on_setattr=s)
+
+ p = Piped("41", "22")
+
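+ # __init__ only applies the converters; the "+1" setter hook only
+ # kicks in on later assignments.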
+ assert 41 == p.x1
+ assert 22 == p.x2
+
+ p.x1 = "41"
+ p.x2 = "22"
+
+ assert 42 == p.x1
+ assert 23 == p.x2
+
+ def test_make_class(self):
+ """
+ on_setattr of make_class gets forwarded.
+ """
+ C = attr.make_class("C", {"x": attr.ib()}, on_setattr=setters.frozen)
+
+ c = C(1)
+
+ with pytest.raises(FrozenAttributeError):
+ c.x = 2
+
+ def test_no_validator_no_converter(self):
+ """
+ validate and convert tolerate missing validators and converters.
+ """
+
+ @attr.s(on_setattr=[setters.convert, setters.validate])
+ class C(object):
+ x = attr.ib()
+
+ c = C(1)
+
+ c.x = 2
+
+ assert 2 == c.x
+
+ def test_validate_respects_run_validators_config(self):
+ """
+ If the run_validators switch is off, setters.validate doesn't run them.
+ """
+
+ @attr.s(on_setattr=setters.validate)
+ class C(object):
+ x = attr.ib(validator=attr.validators.instance_of(int))
+
+ c = C(1)
+
+ attr.set_run_validators(False)
+
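+ # With the global switch off, setters.validate becomes a no-op and
+ # the ill-typed value is accepted.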
+ c.x = "1"
+
+ assert "1" == c.x
+
+ attr.set_run_validators(True)
+
+ with pytest.raises(TypeError) as ei:
+ c.x = "1"
+
+ assert ei.value.args[0].startswith("'x' must be <")
+
+ def test_frozen_on_setattr_class_is_caught(self):
+ """
+ @attr.s(on_setattr=X, frozen=True) raises a ValueError.
+ """
+ with pytest.raises(ValueError) as ei:
+
+ @attr.s(frozen=True, on_setattr=setters.validate)
+ class C(object):
+ x = attr.ib()
+
+ assert "Frozen classes can't use on_setattr." == ei.value.args[0]
+
+ def test_frozen_on_setattr_attribute_is_caught(self):
+ """
+ attr.ib(on_setattr=X) on a frozen class raises a ValueError.
+ """
+
+ with pytest.raises(ValueError) as ei:
+
+ @attr.s(frozen=True)
+ class C(object):
+ x = attr.ib(on_setattr=setters.validate)
+
+ assert "Frozen classes can't use on_setattr." == ei.value.args[0]
+
+ @pytest.mark.parametrize("slots", [True, False])
+ def test_setattr_reset_if_no_custom_setattr(self, slots):
+ """
+ If a class with an active setattr is subclassed and no new setattr
+ is generated, the __setattr__ is set to object.__setattr__.
+
+ We do the double test because of Python 2.
+ """
+
+ def boom(*args):
+ pytest.fail("Must not be called.")
+
+ @attr.s
+ class Hooked(object):
+ x = attr.ib(on_setattr=boom)
+
+ @attr.s(slots=slots)
+ class NoHook(WithOnSetAttrHook):
+ x = attr.ib()
+
+ if not PY2:
+ assert NoHook.__setattr__ == object.__setattr__
+
+ assert 1 == NoHook(1).x
+ assert Hooked.__attrs_own_setattr__
+ assert not NoHook.__attrs_own_setattr__
+ assert WithOnSetAttrHook.__attrs_own_setattr__
+
+ @pytest.mark.parametrize("slots", [True, False])
+ def test_setattr_inherited_do_not_reset(self, slots):
+ """
+ If we inherit a __setattr__ that has been written by the user, we must
+ not reset it unless necessary.
+ """
+
+ class A(object):
+ """
+ Not an attrs class on purpose to prevent accidental resets that
+ would render the asserts meaningless.
+ """
+
+ def __setattr__(self, *args):
+ pass
+
+ @attr.s(slots=slots)
+ class B(A):
+ pass
+
+ assert B.__setattr__ == A.__setattr__
+
+ @attr.s(slots=slots)
+ class C(B):
+ pass
+
+ assert C.__setattr__ == A.__setattr__
+
+ @pytest.mark.parametrize("slots", [True, False])
+ def test_pickling_retains_attrs_own(self, slots):
+ """
+ Pickling/Unpickling does not lose ownership information about
+ __setattr__.
+ """
+ i = WithOnSetAttrHook(1)
+
+ assert True is i.__attrs_own_setattr__
+
+ i2 = pickle.loads(pickle.dumps(i))
+
+ assert True is i2.__attrs_own_setattr__
+
+ WOSAH = pickle.loads(pickle.dumps(WithOnSetAttrHook))
+
+ assert True is WOSAH.__attrs_own_setattr__
+
+ def test_slotted_class_can_have_custom_setattr(self):
+ """
+ A slotted class can define a custom setattr and it doesn't get
+ overwritten.
+
+ Regression test for #680.
+ """
+
+ @attr.s(slots=True)
+ class A(object):
+ def __setattr__(self, key, value):
+ raise SystemError
+
+ with pytest.raises(SystemError):
+ A().x = 1
+
+ @pytest.mark.xfail(raises=attr.exceptions.FrozenAttributeError)
+ def test_slotted_confused(self):
+ """
+ If we have an in-between non-attrs class, setattr reset detection
+ should still work, but currently doesn't.
+
+ It works with dict classes because we can look at the finished class and
+ patch it. With slotted classes we have to deduce it ourselves.
+ """
+
+ @attr.s(slots=True)
+ class A(object):
+ x = attr.ib(on_setattr=setters.frozen)
+
+ class B(A):
+ pass
+
+ @attr.s(slots=True)
+ class C(B):
+ x = attr.ib()
+
+ C(1).x = 2
+
+
+@pytest.mark.skipif(PY2, reason="Python 3-only.")
+class TestSetAttrNoPy2(object):
+ """
+ __setattr__ tests for Py3+ to avoid the skip repetition.
+ """
+
+ @pytest.mark.parametrize("slots", [True, False])
+ def test_setattr_auto_detect_if_no_custom_setattr(self, slots):
+ """
+ It's possible to remove the on_setattr hook from an attribute and
+ therefore write a custom __setattr__.
+ """
+ assert 1 == WithOnSetAttrHook(1).x
+
+ @attr.s(auto_detect=True, slots=slots)
+ class RemoveNeedForOurSetAttr(WithOnSetAttrHook):
+ x = attr.ib()
+
+ def __setattr__(self, name, val):
+ object.__setattr__(self, name, val * 2)
+
+ i = RemoveNeedForOurSetAttr(1)
+
+ assert not RemoveNeedForOurSetAttr.__attrs_own_setattr__
+ assert 2 == i.x
+
+ @pytest.mark.parametrize("slots", [True, False])
+ def test_setattr_restore_respects_auto_detect(self, slots):
+ """
+ If __setattr__ should be restored but the user supplied its own and
+ set auto_detect, leave it alone.
+ """
+
+ @attr.s(auto_detect=True, slots=slots)
+ class CustomSetAttr:
+ def __setattr__(self, _, __):
+ pass
+
+ assert CustomSetAttr.__setattr__ != object.__setattr__
+
+ @pytest.mark.parametrize("slots", [True, False])
+ def test_setattr_auto_detect_frozen(self, slots):
+ """
+ frozen=True together with a detected custom __setattr__ are rejected.
+ """
+ with pytest.raises(
+ ValueError, match="Can't freeze a class with a custom __setattr__."
+ ):
+
+ @attr.s(auto_detect=True, slots=slots, frozen=True)
+ class CustomSetAttr(Frozen):
+ def __setattr__(self, _, __):
+ pass
+
+ @pytest.mark.parametrize("slots", [True, False])
+ def test_setattr_auto_detect_on_setattr(self, slots):
+ """
+ on_setattr attributes together with a detected custom __setattr__ are
+ rejected.
+ """
+ with pytest.raises(
+ ValueError,
+ match="Can't combine custom __setattr__ with on_setattr hooks.",
+ ):
+
+ @attr.s(auto_detect=True, slots=slots)
+ class HookAndCustomSetAttr(object):
+ x = attr.ib(on_setattr=lambda *args: None)
+
+ def __setattr__(self, _, __):
+ pass
+
+ @pytest.mark.parametrize("a_slots", [True, False])
+ @pytest.mark.parametrize("b_slots", [True, False])
+ @pytest.mark.parametrize("c_slots", [True, False])
+ def test_setattr_inherited_do_not_reset_intermediate(
+ self, a_slots, b_slots, c_slots
+ ):
+ """
+ A user-provided intermediate __setattr__ is not reset to
+ object.__setattr__.
+
+ This only can work on Python 3+ with auto_detect activated, such that
+ attrs can know that there is a user-provided __setattr__.
+ """
+
+ @attr.s(slots=a_slots)
+ class A(object):
+ x = attr.ib(on_setattr=setters.frozen)
+
+ @attr.s(slots=b_slots, auto_detect=True)
+ class B(A):
+ x = attr.ib(on_setattr=setters.NO_OP)
+
+ def __setattr__(self, key, value):
+ raise SystemError
+
+ @attr.s(slots=c_slots)
+ class C(B):
+ pass
+
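+ # B supplies its own __setattr__ (detected via auto_detect), so attrs
+ # neither claims ownership of it nor resets it in the subclass C.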
+ assert getattr(A, "__attrs_own_setattr__", False) is True
+ assert getattr(B, "__attrs_own_setattr__", False) is False
+ assert getattr(C, "__attrs_own_setattr__", False) is False
+
+ with pytest.raises(SystemError):
+ C(1).x = 3
+
+ def test_docstring(self):
+ """
+ Generated __setattr__ has a useful docstring.
+ """
+ assert (
+ "Method generated by attrs for class WithOnSetAttrHook."
+ == WithOnSetAttrHook.__setattr__.__doc__
+ )
diff --git a/testing/web-platform/tests/tools/third_party/attrs/tests/test_slots.py b/testing/web-platform/tests/tools/third_party/attrs/tests/test_slots.py
new file mode 100644
index 0000000000..baf9a40ddb
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/tests/test_slots.py
@@ -0,0 +1,740 @@
+# SPDX-License-Identifier: MIT
+
+"""
+Unit tests for slots-related functionality.
+"""
+
+import pickle
+import sys
+import types
+import weakref
+
+import pytest
+
+import attr
+
+from attr._compat import PY2, PYPY, just_warn, make_set_closure_cell
+
+
+# Pympler doesn't work on PyPy.
+try:
+ from pympler.asizeof import asizeof
+
+ has_pympler = True
+except BaseException: # Won't be an import error.
+ has_pympler = False
+
+
+@attr.s
+class C1(object):
+ x = attr.ib(validator=attr.validators.instance_of(int))
+ y = attr.ib()
+
+ def method(self):
+ return self.x
+
+ @classmethod
+ def classmethod(cls):
+ return "clsmethod"
+
+ @staticmethod
+ def staticmethod():
+ return "staticmethod"
+
+ if not PY2:
+
+ def my_class(self):
+ return __class__
+
+ def my_super(self):
+ """Just to test out the no-arg super."""
+ return super().__repr__()
+
+
+@attr.s(slots=True, hash=True)
+class C1Slots(object):
+ x = attr.ib(validator=attr.validators.instance_of(int))
+ y = attr.ib()
+
+ def method(self):
+ return self.x
+
+ @classmethod
+ def classmethod(cls):
+ return "clsmethod"
+
+ @staticmethod
+ def staticmethod():
+ return "staticmethod"
+
+ if not PY2:
+
+ def my_class(self):
+ return __class__
+
+ def my_super(self):
+ """Just to test out the no-arg super."""
+ return super().__repr__()
+
+
+def test_slots_being_used():
+ """
+ The class is really using __slots__.
+ """
+ non_slot_instance = C1(x=1, y="test")
+ slot_instance = C1Slots(x=1, y="test")
+
+ assert "__dict__" not in dir(slot_instance)
+ assert "__slots__" in dir(slot_instance)
+
+ assert "__dict__" in dir(non_slot_instance)
+ assert "__slots__" not in dir(non_slot_instance)
+
+ assert set(["__weakref__", "x", "y"]) == set(slot_instance.__slots__)
+
+ if has_pympler:
+ assert asizeof(slot_instance) < asizeof(non_slot_instance)
+
+ non_slot_instance.t = "test"
+ with pytest.raises(AttributeError):
+ slot_instance.t = "test"
+
+ assert 1 == non_slot_instance.method()
+ assert 1 == slot_instance.method()
+
+ assert attr.fields(C1Slots) == attr.fields(C1)
+ assert attr.asdict(slot_instance) == attr.asdict(non_slot_instance)
+
+
+def test_basic_attr_funcs():
+ """
+ Comparison, `__eq__`, `__hash__`, `__repr__`, `attrs.asdict` work.
+ """
+ a = C1Slots(x=1, y=2)
+ b = C1Slots(x=1, y=3)
+ a_ = C1Slots(x=1, y=2)
+
+ # Comparison.
+ assert b > a
+
+ assert a_ == a
+
+ # Hashing.
+ hash(b) # Just to assert it doesn't raise.
+
+ # Repr.
+ assert "C1Slots(x=1, y=2)" == repr(a)
+
+ assert {"x": 1, "y": 2} == attr.asdict(a)
+
+
+def test_inheritance_from_nonslots():
+ """
+ Inheritance from a non-slotted class works.
+
+ Note that a slotted class inheriting from an ordinary class loses most of
+ the benefits of slotted classes, but it should still work.
+ """
+
+ @attr.s(slots=True, hash=True)
+ class C2Slots(C1):
+ z = attr.ib()
+
+ c2 = C2Slots(x=1, y=2, z="test")
+
+ assert 1 == c2.x
+ assert 2 == c2.y
+ assert "test" == c2.z
+
+ c2.t = "test" # This will work, using the base class.
+
+ assert "test" == c2.t
+
+ assert 1 == c2.method()
+ assert "clsmethod" == c2.classmethod()
+ assert "staticmethod" == c2.staticmethod()
+
+ assert set(["z"]) == set(C2Slots.__slots__)
+
+ c3 = C2Slots(x=1, y=3, z="test")
+
+ assert c3 > c2
+
+ c2_ = C2Slots(x=1, y=2, z="test")
+
+ assert c2 == c2_
+
+ assert "C2Slots(x=1, y=2, z='test')" == repr(c2)
+
+ hash(c2) # Just to assert it doesn't raise.
+
+ assert {"x": 1, "y": 2, "z": "test"} == attr.asdict(c2)
+
+
+def test_nonslots_these():
+ """
+ Enhancing a dict class using 'these' works.
+
+ This will actually *replace* the class with another one, using slots.
+ """
+
+ class SimpleOrdinaryClass(object):
+ def __init__(self, x, y, z):
+ self.x = x
+ self.y = y
+ self.z = z
+
+ def method(self):
+ return self.x
+
+ @classmethod
+ def classmethod(cls):
+ return "clsmethod"
+
+ @staticmethod
+ def staticmethod():
+ return "staticmethod"
+
+ C2Slots = attr.s(
+ these={"x": attr.ib(), "y": attr.ib(), "z": attr.ib()},
+ init=False,
+ slots=True,
+ hash=True,
+ )(SimpleOrdinaryClass)
+
+ c2 = C2Slots(x=1, y=2, z="test")
+ assert 1 == c2.x
+ assert 2 == c2.y
+ assert "test" == c2.z
+ with pytest.raises(AttributeError):
+ c2.t = "test" # We have slots now.
+
+ assert 1 == c2.method()
+ assert "clsmethod" == c2.classmethod()
+ assert "staticmethod" == c2.staticmethod()
+
+ assert set(["__weakref__", "x", "y", "z"]) == set(C2Slots.__slots__)
+
+ c3 = C2Slots(x=1, y=3, z="test")
+ assert c3 > c2
+ c2_ = C2Slots(x=1, y=2, z="test")
+ assert c2 == c2_
+
+ assert "SimpleOrdinaryClass(x=1, y=2, z='test')" == repr(c2)
+
+ hash(c2) # Just to assert it doesn't raise.
+
+ assert {"x": 1, "y": 2, "z": "test"} == attr.asdict(c2)
+
+
+def test_inheritance_from_slots():
+ """
+ Inheriting from an attrs slotted class works.
+ """
+
+ @attr.s(slots=True, hash=True)
+ class C2Slots(C1Slots):
+ z = attr.ib()
+
+ @attr.s(slots=True, hash=True)
+ class C2(C1):
+ z = attr.ib()
+
+ c2 = C2Slots(x=1, y=2, z="test")
+ assert 1 == c2.x
+ assert 2 == c2.y
+ assert "test" == c2.z
+
+ assert set(["z"]) == set(C2Slots.__slots__)
+
+ assert 1 == c2.method()
+ assert "clsmethod" == c2.classmethod()
+ assert "staticmethod" == c2.staticmethod()
+
+ with pytest.raises(AttributeError):
+ c2.t = "test"
+
+ non_slot_instance = C2(x=1, y=2, z="test")
+ if has_pympler:
+ assert asizeof(c2) < asizeof(non_slot_instance)
+
+ c3 = C2Slots(x=1, y=3, z="test")
+ assert c3 > c2
+ c2_ = C2Slots(x=1, y=2, z="test")
+ assert c2 == c2_
+
+ assert "C2Slots(x=1, y=2, z='test')" == repr(c2)
+
+ hash(c2) # Just to assert it doesn't raise.
+
+ assert {"x": 1, "y": 2, "z": "test"} == attr.asdict(c2)
+
+
+def test_inheritance_from_slots_with_attribute_override():
+ """
+ Inheriting from a slotted class doesn't re-create existing slots
+ """
+
+ class HasXSlot(object):
+ __slots__ = ("x",)
+
+ @attr.s(slots=True, hash=True)
+ class C2Slots(C1Slots):
+ # y re-defined here but it shouldn't get a slot
+ y = attr.ib()
+ z = attr.ib()
+
+ @attr.s(slots=True, hash=True)
+ class NonAttrsChild(HasXSlot):
+ # Parent class has slot for "x" already, so we skip it
+ x = attr.ib()
+ y = attr.ib()
+ z = attr.ib()
+
+ c2 = C2Slots(1, 2, "test")
+ assert 1 == c2.x
+ assert 2 == c2.y
+ assert "test" == c2.z
+
+ assert {"z"} == set(C2Slots.__slots__)
+
+ na = NonAttrsChild(1, 2, "test")
+ assert 1 == na.x
+ assert 2 == na.y
+ assert "test" == na.z
+
+ assert {"__weakref__", "y", "z"} == set(NonAttrsChild.__slots__)
+
+
+def test_inherited_slot_reuses_slot_descriptor():
+ """
+ We reuse slot descriptor for an attr.ib defined in a slotted attr.s
+ """
+
+ class HasXSlot(object):
+ __slots__ = ("x",)
+
+ class OverridesX(HasXSlot):
+ @property
+ def x(self):
+ return None
+
+ @attr.s(slots=True)
+ class Child(OverridesX):
+ x = attr.ib()
+
+ assert Child.x is not OverridesX.x
+ assert Child.x is HasXSlot.x
+
+ c = Child(1)
+ assert 1 == c.x
+ assert set() == set(Child.__slots__)
+
+ ox = OverridesX()
+ assert ox.x is None
+
+
+def test_bare_inheritance_from_slots():
+ """
+ Inheriting from a bare attrs slotted class works.
+ """
+
+ @attr.s(
+ init=False, eq=False, order=False, hash=False, repr=False, slots=True
+ )
+ class C1BareSlots(object):
+ x = attr.ib(validator=attr.validators.instance_of(int))
+ y = attr.ib()
+
+ def method(self):
+ return self.x
+
+ @classmethod
+ def classmethod(cls):
+ return "clsmethod"
+
+ @staticmethod
+ def staticmethod():
+ return "staticmethod"
+
+ @attr.s(init=False, eq=False, order=False, hash=False, repr=False)
+ class C1Bare(object):
+ x = attr.ib(validator=attr.validators.instance_of(int))
+ y = attr.ib()
+
+ def method(self):
+ return self.x
+
+ @classmethod
+ def classmethod(cls):
+ return "clsmethod"
+
+ @staticmethod
+ def staticmethod():
+ return "staticmethod"
+
+ @attr.s(slots=True, hash=True)
+ class C2Slots(C1BareSlots):
+ z = attr.ib()
+
+ @attr.s(slots=True, hash=True)
+ class C2(C1Bare):
+ z = attr.ib()
+
+ c2 = C2Slots(x=1, y=2, z="test")
+ assert 1 == c2.x
+ assert 2 == c2.y
+ assert "test" == c2.z
+
+ assert 1 == c2.method()
+ assert "clsmethod" == c2.classmethod()
+ assert "staticmethod" == c2.staticmethod()
+
+ with pytest.raises(AttributeError):
+ c2.t = "test"
+
+ non_slot_instance = C2(x=1, y=2, z="test")
+ if has_pympler:
+ assert asizeof(c2) < asizeof(non_slot_instance)
+
+ c3 = C2Slots(x=1, y=3, z="test")
+ assert c3 > c2
+ c2_ = C2Slots(x=1, y=2, z="test")
+ assert c2 == c2_
+
+ assert "C2Slots(x=1, y=2, z='test')" == repr(c2)
+
+ hash(c2) # Just to assert it doesn't raise.
+
+ assert {"x": 1, "y": 2, "z": "test"} == attr.asdict(c2)
+
+
+@pytest.mark.skipif(PY2, reason="closure cell rewriting is PY3-only.")
+class TestClosureCellRewriting(object):
+ def test_closure_cell_rewriting(self):
+ """
+ Slotted classes support proper closure cell rewriting.
+
+ This affects features like `__class__` and the no-arg super().
+ """
+ non_slot_instance = C1(x=1, y="test")
+ slot_instance = C1Slots(x=1, y="test")
+
+ assert non_slot_instance.my_class() is C1
+ assert slot_instance.my_class() is C1Slots
+
+ # Just assert they return something, and not an exception.
+ assert non_slot_instance.my_super()
+ assert slot_instance.my_super()
+
+ def test_inheritance(self):
+ """
+ Slotted classes support proper closure cell rewriting when inheriting.
+
+ This affects features like `__class__` and the no-arg super().
+ """
+
+ @attr.s
+ class C2(C1):
+ def my_subclass(self):
+ return __class__
+
+ @attr.s
+ class C2Slots(C1Slots):
+ def my_subclass(self):
+ return __class__
+
+ non_slot_instance = C2(x=1, y="test")
+ slot_instance = C2Slots(x=1, y="test")
+
+ assert non_slot_instance.my_class() is C1
+ assert slot_instance.my_class() is C1Slots
+
+ # Just assert they return something, and not an exception.
+ assert non_slot_instance.my_super()
+ assert slot_instance.my_super()
+
+ assert non_slot_instance.my_subclass() is C2
+ assert slot_instance.my_subclass() is C2Slots
+
+ @pytest.mark.parametrize("slots", [True, False])
+ def test_cls_static(self, slots):
+ """
+ Slotted classes support proper closure cell rewriting for class- and
+ static methods.
+ """
+ # Python can reuse closure cells, so we create new classes just for
+ # this test.
+
+ @attr.s(slots=slots)
+ class C:
+ @classmethod
+ def clsmethod(cls):
+ return __class__
+
+ assert C.clsmethod() is C
+
+ @attr.s(slots=slots)
+ class D:
+ @staticmethod
+ def statmethod():
+ return __class__
+
+ assert D.statmethod() is D
+
+ @pytest.mark.skipif(PYPY, reason="set_closure_cell always works on PyPy")
+ @pytest.mark.skipif(
+ sys.version_info >= (3, 8),
+ reason="can't break CodeType.replace() via monkeypatch",
+ )
+ def test_code_hack_failure(self, monkeypatch):
+ """
+ Keeps working if function/code object introspection doesn't work
+ on this (nonstandard) interpreter.
+
+ A warning is emitted that points to the actual code.
+ """
+ # This is a pretty good approximation of the behavior of
+ # the actual types.CodeType on Brython.
+ monkeypatch.setattr(types, "CodeType", lambda: None)
+ func = make_set_closure_cell()
+
+ with pytest.warns(RuntimeWarning) as wr:
+ func()
+
+ w = wr.pop()
+ assert __file__ == w.filename
+ assert (
+ "Running interpreter doesn't sufficiently support code object "
+ "introspection. Some features like bare super() or accessing "
+ "__class__ will not work with slotted classes.",
+ ) == w.message.args
+
+ assert just_warn is func
+
+
+@pytest.mark.skipif(PYPY, reason="__slots__ only block weakref on CPython")
+def test_not_weakrefable():
+ """
+ Instance is not weak-referenceable when `weakref_slot=False` in CPython.
+ """
+
+ @attr.s(slots=True, weakref_slot=False)
+ class C(object):
+ pass
+
+ c = C()
+
+ with pytest.raises(TypeError):
+ weakref.ref(c)
+
+
+@pytest.mark.skipif(
+ not PYPY, reason="slots without weakref_slot should only work on PyPy"
+)
+def test_implicitly_weakrefable():
+ """
+ Instance is weak-referenceable even when `weakref_slot=False` in PyPy.
+ """
+
+ @attr.s(slots=True, weakref_slot=False)
+ class C(object):
+ pass
+
+ c = C()
+ w = weakref.ref(c)
+
+ assert c is w()
+
+
+def test_weakrefable():
+ """
+ Instance is weak-referenceable when `weakref_slot=True`.
+ """
+
+ @attr.s(slots=True, weakref_slot=True)
+ class C(object):
+ pass
+
+ c = C()
+ w = weakref.ref(c)
+
+ assert c is w()
+
+
+def test_weakref_does_not_add_a_field():
+ """
+ `weakref_slot=True` does not add a field to the class.
+ """
+
+ @attr.s(slots=True, weakref_slot=True)
+ class C(object):
+ field = attr.ib()
+
+ assert [f.name for f in attr.fields(C)] == ["field"]
+
+
+def tests_weakref_does_not_add_when_inheriting_with_weakref():
+ """
+ `weakref_slot=True` does not add a new __weakref__ slot when inheriting
+ one.
+ """
+
+ @attr.s(slots=True, weakref_slot=True)
+ class C(object):
+ pass
+
+ @attr.s(slots=True, weakref_slot=True)
+ class D(C):
+ pass
+
+ d = D()
+ w = weakref.ref(d)
+
+ assert d is w()
+
+
+def tests_weakref_does_not_add_with_weakref_attribute():
+ """
+ `weakref_slot=True` does not add a new __weakref__ slot when an attribute
+ of that name exists.
+ """
+
+ @attr.s(slots=True, weakref_slot=True)
+ class C(object):
+ __weakref__ = attr.ib(
+ init=False, hash=False, repr=False, eq=False, order=False
+ )
+
+ c = C()
+ w = weakref.ref(c)
+
+ assert c is w()
+
+
+def test_slots_empty_cell():
+ """
+ Tests that no `ValueError: Cell is empty` exception is raised when
+ closure cells are present with no contents in a `slots=True` class.
+ (issue https://github.com/python-attrs/attrs/issues/589)
+
+ On Python 3, if a method mentions `__class__` or uses the no-arg `super()`,
+ the compiler will bake a reference to the class in the method itself as
+ `method.__closure__`. Since `attrs` replaces the class with a clone,
+ `_ClassBuilder._create_slots_class(self)` will rewrite these references so
+ it keeps working. This method did not properly cover the edge case where
+ the closure cell was empty; it has since been fixed, and this is the
+ non-regression test.
+ """
+
+ @attr.s(slots=True)
+ class C(object):
+ field = attr.ib()
+
+ def f(self, a):
+ super(C, self).__init__()
+
+ C(field=1)
+
+
+@attr.s(getstate_setstate=True)
+class C2(object):
+ x = attr.ib()
+
+
+@attr.s(slots=True, getstate_setstate=True)
+class C2Slots(object):
+ x = attr.ib()
+
+
+class TestPickle(object):
+ @pytest.mark.parametrize("protocol", range(pickle.HIGHEST_PROTOCOL))
+ def test_pickleable_by_default(self, protocol):
+ """
+ If nothing else is passed, slotted classes can be pickled and
+ unpickled with all supported protocols.
+ """
+ i1 = C1Slots(1, 2)
+ i2 = pickle.loads(pickle.dumps(i1, protocol))
+
+ assert i1 == i2
+ assert i1 is not i2
+
+ def test_no_getstate_setstate_for_dict_classes(self):
+ """
+ As long as getstate_setstate is None, nothing is done to dict
+ classes.
+ """
+ i = C1(1, 2)
+
+ assert None is getattr(i, "__getstate__", None)
+ assert None is getattr(i, "__setstate__", None)
+
+ def test_no_getstate_setstate_if_option_false(self):
+ """
+ Don't add getstate/setstate if getstate_setstate is False.
+ """
+
+ @attr.s(slots=True, getstate_setstate=False)
+ class C(object):
+ x = attr.ib()
+
+ i = C(42)
+
+ assert None is getattr(i, "__getstate__", None)
+ assert None is getattr(i, "__setstate__", None)
+
+ @pytest.mark.parametrize("cls", [C2(1), C2Slots(1)])
+ def test_getstate_set_state_force_true(self, cls):
+ """
+ If getstate_setstate is True, add them unconditionally.
+ """
+ assert None is not getattr(cls, "__getstate__", None)
+ assert None is not getattr(cls, "__setstate__", None)
+
+
+def test_slots_super_property_get():
+ """
+ On Python 2/3, the explicit two-argument `super(B, self)` works.
+ """
+
+ @attr.s(slots=True)
+ class A(object):
+ x = attr.ib()
+
+ @property
+ def f(self):
+ return self.x
+
+ @attr.s(slots=True)
+ class B(A):
+ @property
+ def f(self):
+ return super(B, self).f ** 2
+
+ assert B(11).f == 121
+ assert B(17).f == 289
+
+
+@pytest.mark.skipif(PY2, reason="shortcut super() is PY3-only.")
+def test_slots_super_property_get_shortcut():
+ """
+ On Python 3, the `super()` shortcut is allowed.
+ """
+
+ @attr.s(slots=True)
+ class A(object):
+ x = attr.ib()
+
+ @property
+ def f(self):
+ return self.x
+
+ @attr.s(slots=True)
+ class B(A):
+ @property
+ def f(self):
+ return super().f ** 2
+
+ assert B(11).f == 121
+ assert B(17).f == 289
diff --git a/testing/web-platform/tests/tools/third_party/attrs/tests/test_validators.py b/testing/web-platform/tests/tools/third_party/attrs/tests/test_validators.py
new file mode 100644
index 0000000000..d7c6de8bad
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/tests/test_validators.py
@@ -0,0 +1,952 @@
+# SPDX-License-Identifier: MIT
+
+"""
+Tests for `attr.validators`.
+"""
+
+from __future__ import absolute_import, division, print_function
+
+import re
+
+import pytest
+
+import attr
+
+from attr import _config, fields, has
+from attr import validators as validator_module
+from attr._compat import PY2, TYPE
+from attr.validators import (
+ and_,
+ deep_iterable,
+ deep_mapping,
+ ge,
+ gt,
+ in_,
+ instance_of,
+ is_callable,
+ le,
+ lt,
+ matches_re,
+ max_len,
+ optional,
+ provides,
+)
+
+from .utils import simple_attr
+
+
+@pytest.fixture(scope="module")
+def zope_interface():
+ """Provides ``zope.interface`` if available, skipping the test if not."""
+ try:
+ import zope.interface
+ except ImportError:
+ raise pytest.skip(
+ "zope-related tests skipped when zope.interface is not installed"
+ )
+
+ return zope.interface
+
+
+class TestDisableValidators(object):
+ @pytest.fixture(autouse=True)
+ def reset_default(self):
+ """
+ Make sure validators are always enabled after a test.
+ """
+ yield
+ _config._run_validators = True
+
+ def test_default(self):
+ """
+ Run validators by default.
+ """
+ assert _config._run_validators is True
+
+ @pytest.mark.parametrize("value, expected", [(True, False), (False, True)])
+ def test_set_validators_disabled(self, value, expected):
+ """
+ Sets `_run_validators`.
+ """
+ validator_module.set_disabled(value)
+
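+ # set_disabled(True) switches validators off, i.e. _run_validators
+ # becomes False (and vice versa).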
+ assert _config._run_validators is expected
+
+ @pytest.mark.parametrize("value, expected", [(True, False), (False, True)])
+ def test_disabled(self, value, expected):
+ """
+ Returns `_run_validators`.
+ """
+ _config._run_validators = value
+
+ assert validator_module.get_disabled() is expected
+
+ def test_disabled_ctx(self):
+ """
+ The `disabled` context manager disables running validators,
+ but only within its context.
+ """
+ assert _config._run_validators is True
+
+ with validator_module.disabled():
+ assert _config._run_validators is False
+
+ assert _config._run_validators is True
+
+ def test_disabled_ctx_with_errors(self):
+ """
+ Running validators is re-enabled even if an error is raised.
+ """
+ assert _config._run_validators is True
+
+ with pytest.raises(ValueError):
+ with validator_module.disabled():
+ assert _config._run_validators is False
+
+ raise ValueError("haha!")
+
+ assert _config._run_validators is True
+
+
+class TestInstanceOf(object):
+ """
+ Tests for `instance_of`.
+ """
+
+ def test_in_all(self):
+ """
+ Verify that this validator is in ``__all__``.
+ """
+ assert instance_of.__name__ in validator_module.__all__
+
+ def test_success(self):
+ """
+ Nothing happens if types match.
+ """
+ v = instance_of(int)
+ v(None, simple_attr("test"), 42)
+
+ def test_subclass(self):
+ """
+ Subclasses are accepted too.
+ """
+ v = instance_of(int)
+ # yep, bools are a subclass of int :(
+ v(None, simple_attr("test"), True)
+
+ def test_fail(self):
+ """
+ Raises `TypeError` on wrong types.
+ """
+ v = instance_of(int)
+ a = simple_attr("test")
+ with pytest.raises(TypeError) as e:
+ v(None, a, "42")
+ assert (
+ "'test' must be <{type} 'int'> (got '42' that is a <{type} "
+ "'str'>).".format(type=TYPE),
+ a,
+ int,
+ "42",
+ ) == e.value.args
+
+ def test_repr(self):
+ """
+ Returned validator has a useful `__repr__`.
+ """
+ v = instance_of(int)
+ assert (
+ "<instance_of validator for type <{type} 'int'>>".format(type=TYPE)
+ ) == repr(v)
+
+
+class TestMatchesRe(object):
+ """
+ Tests for `matches_re`.
+ """
+
+ def test_in_all(self):
+ """
+ validator is in ``__all__``.
+ """
+ assert matches_re.__name__ in validator_module.__all__
+
+ def test_match(self):
+ """
+ Silent on matches, raises ValueError on mismatches.
+ """
+
+ @attr.s
+ class ReTester(object):
+ str_match = attr.ib(validator=matches_re("a|ab"))
+
+ ReTester("ab") # shouldn't raise exceptions
+ with pytest.raises(TypeError):
+ ReTester(1)
+ with pytest.raises(ValueError):
+ ReTester("1")
+ with pytest.raises(ValueError):
+ ReTester("a1")
+
+ def test_flags(self):
+ """
+ Flags are propagated to the match function.
+ """
+
+ @attr.s
+ class MatchTester(object):
+ val = attr.ib(validator=matches_re("a", re.IGNORECASE, re.match))
+
+ MatchTester("A1") # test flags and using re.match
+
+ def test_precompiled_pattern(self):
+ """
+ Pre-compiled patterns are accepted.
+ """
+ pattern = re.compile("a")
+
+ @attr.s
+ class RePatternTester(object):
+ val = attr.ib(validator=matches_re(pattern))
+
+ RePatternTester("a")
+
+ def test_precompiled_pattern_no_flags(self):
+ """
+ A pre-compiled pattern cannot be combined with a 'flags' argument.
+ """
+ pattern = re.compile("")
+
+ with pytest.raises(
+ TypeError, match="can only be used with a string pattern"
+ ):
+ matches_re(pattern, flags=re.IGNORECASE)
+
+ def test_different_func(self):
+ """
+ Changing the match functions works.
+ """
+
+ @attr.s
+ class SearchTester(object):
+ val = attr.ib(validator=matches_re("a", 0, re.search))
+
+ SearchTester("bab") # re.search will match
+
+ def test_catches_invalid_func(self):
+ """
+ Invalid match functions are caught.
+ """
+ with pytest.raises(ValueError) as ei:
+ matches_re("a", 0, lambda: None)
+
+ if not PY2:
+ assert (
+ "'func' must be one of None, fullmatch, match, search."
+ == ei.value.args[0]
+ )
+ else:
+ assert (
+ "'func' must be one of None, match, search."
+ == ei.value.args[0]
+ )
+
+ @pytest.mark.parametrize(
+ "func", [None, getattr(re, "fullmatch", None), re.match, re.search]
+ )
+ def test_accepts_all_valid_func(self, func):
+ """
+ Every valid match function is accepted.
+ """
+ matches_re("a", func=func)
+
+ def test_repr(self):
+ """
+ __repr__ is meaningful.
+ """
+ assert repr(matches_re("a")).startswith(
+ "<matches_re validator for pattern"
+ )
+
+
+def always_pass(_, __, ___):
+ """
+ Toy validator that always passes.
+ """
+
+
+def always_fail(_, __, ___):
+ """
+ Toy validator that always fails.
+ """
+ 0 / 0
+
+
+class TestAnd(object):
+ def test_in_all(self):
+ """
+ Verify that this validator is in ``__all__``.
+ """
+ assert and_.__name__ in validator_module.__all__
+
+ def test_success(self):
+ """
+ Succeeds if all wrapped validators succeed.
+ """
+ v = and_(instance_of(int), always_pass)
+
+ v(None, simple_attr("test"), 42)
+
+ def test_fail(self):
+ """
+ Fails if any wrapped validator fails.
+ """
+ v = and_(instance_of(int), always_fail)
+
+ with pytest.raises(ZeroDivisionError):
+ v(None, simple_attr("test"), 42)
+
+ def test_sugar(self):
+ """
+ `and_(v1, v2, v3)` and `[v1, v2, v3]` are equivalent.
+ """
+
+ @attr.s
+ class C(object):
+ a1 = attr.ib("a1", validator=and_(instance_of(int)))
+ a2 = attr.ib("a2", validator=[instance_of(int)])
+
+ assert C.__attrs_attrs__[0].validator == C.__attrs_attrs__[1].validator
+
+
+@pytest.fixture(scope="module")
+def ifoo(zope_interface):
+ """Provides a test ``zope.interface.Interface`` in ``zope`` tests."""
+
+ class IFoo(zope_interface.Interface):
+ """
+ An interface.
+ """
+
+ def f():
+ """
+ A function called f.
+ """
+
+ return IFoo
+
+
+class TestProvides(object):
+ """
+ Tests for `provides`.
+ """
+
+ def test_in_all(self):
+ """
+ Verify that this validator is in ``__all__``.
+ """
+ assert provides.__name__ in validator_module.__all__
+
+ def test_success(self, zope_interface, ifoo):
+ """
+ Nothing happens if value provides requested interface.
+ """
+
+ @zope_interface.implementer(ifoo)
+ class C(object):
+ def f(self):
+ pass
+
+ v = provides(ifoo)
+ v(None, simple_attr("x"), C())
+
+ def test_fail(self, ifoo):
+ """
+ Raises `TypeError` if the interface isn't provided by the value.
+ """
+ value = object()
+ a = simple_attr("x")
+
+ v = provides(ifoo)
+ with pytest.raises(TypeError) as e:
+ v(None, a, value)
+ assert (
+ "'x' must provide {interface!r} which {value!r} doesn't.".format(
+ interface=ifoo, value=value
+ ),
+ a,
+ ifoo,
+ value,
+ ) == e.value.args
+
+ def test_repr(self, ifoo):
+ """
+ Returned validator has a useful `__repr__`.
+ """
+ v = provides(ifoo)
+ assert (
+ "<provides validator for interface {interface!r}>".format(
+ interface=ifoo
+ )
+ ) == repr(v)
+
+
+@pytest.mark.parametrize(
+ "validator", [instance_of(int), [always_pass, instance_of(int)]]
+)
+class TestOptional(object):
+ """
+ Tests for `optional`.
+ """
+
+ def test_in_all(self, validator):
+ """
+ Verify that this validator is in ``__all__``.
+ """
+ assert optional.__name__ in validator_module.__all__
+
+ def test_success(self, validator):
+ """
+ Nothing happens if validator succeeds.
+ """
+ v = optional(validator)
+ v(None, simple_attr("test"), 42)
+
+ def test_success_with_none(self, validator):
+ """
+ Nothing happens if None.
+ """
+ v = optional(validator)
+ v(None, simple_attr("test"), None)
+
+ def test_fail(self, validator):
+ """
+ Raises `TypeError` on wrong types.
+ """
+ v = optional(validator)
+ a = simple_attr("test")
+ with pytest.raises(TypeError) as e:
+ v(None, a, "42")
+ assert (
+ "'test' must be <{type} 'int'> (got '42' that is a <{type} "
+ "'str'>).".format(type=TYPE),
+ a,
+ int,
+ "42",
+ ) == e.value.args
+
+ def test_repr(self, validator):
+ """
+ Returned validator has a useful `__repr__`.
+ """
+ v = optional(validator)
+
+ if isinstance(validator, list):
+ repr_s = (
+ "<optional validator for _AndValidator(_validators=[{func}, "
+ "<instance_of validator for type <{type} 'int'>>]) or None>"
+ ).format(func=repr(always_pass), type=TYPE)
+ else:
+ repr_s = (
+ "<optional validator for <instance_of validator for type "
+ "<{type} 'int'>> or None>"
+ ).format(type=TYPE)
+
+ assert repr_s == repr(v)
+
+
+class TestIn_(object):
+ """
+ Tests for `in_`.
+ """
+
+ def test_in_all(self):
+ """
+ Verify that this validator is in ``__all__``.
+ """
+ assert in_.__name__ in validator_module.__all__
+
+ def test_success_with_value(self):
+ """
+ If the value is in our options, nothing happens.
+ """
+ v = in_([1, 2, 3])
+ a = simple_attr("test")
+ v(1, a, 3)
+
+ def test_fail(self):
+ """
+ Raise ValueError if the value is outside our options.
+ """
+ v = in_([1, 2, 3])
+ a = simple_attr("test")
+ with pytest.raises(ValueError) as e:
+ v(None, a, None)
+ assert ("'test' must be in [1, 2, 3] (got None)",) == e.value.args
+
+ def test_fail_with_string(self):
+ """
+ Raise ValueError if the value is outside our options when the
+ options are specified as a string and the value is not a string.
+ """
+ v = in_("abc")
+ a = simple_attr("test")
+ with pytest.raises(ValueError) as e:
+ v(None, a, None)
+ assert ("'test' must be in 'abc' (got None)",) == e.value.args
+
+ def test_repr(self):
+ """
+ Returned validator has a useful `__repr__`.
+ """
+ v = in_([3, 4, 5])
+ assert (("<in_ validator with options [3, 4, 5]>")) == repr(v)
+
+
+class TestDeepIterable(object):
+ """
+ Tests for `deep_iterable`.
+ """
+
+ def test_in_all(self):
+ """
+ Verify that this validator is in ``__all__``.
+ """
+ assert deep_iterable.__name__ in validator_module.__all__
+
+ def test_success_member_only(self):
+ """
+ If the member validator succeeds and the iterable validator is not set,
+ nothing happens.
+ """
+ member_validator = instance_of(int)
+ v = deep_iterable(member_validator)
+ a = simple_attr("test")
+ v(None, a, [42])
+
+ def test_success_member_and_iterable(self):
+ """
+ If both the member and iterable validators succeed, nothing happens.
+ """
+ member_validator = instance_of(int)
+ iterable_validator = instance_of(list)
+ v = deep_iterable(member_validator, iterable_validator)
+ a = simple_attr("test")
+ v(None, a, [42])
+
+ @pytest.mark.parametrize(
+ "member_validator, iterable_validator",
+ (
+ (instance_of(int), 42),
+ (42, instance_of(list)),
+ (42, 42),
+ (42, None),
+ ),
+ )
+ def test_noncallable_validators(
+ self, member_validator, iterable_validator
+ ):
+ """
+ Raise `TypeError` if any validators are not callable.
+ """
+ with pytest.raises(TypeError) as e:
+ deep_iterable(member_validator, iterable_validator)
+ value = 42
+ message = "must be callable (got {value} that is a {type_}).".format(
+ value=value, type_=value.__class__
+ )
+
+ assert message in e.value.args[0]
+ assert value == e.value.args[1]
+ assert message in e.value.msg
+ assert value == e.value.value
+
+ def test_fail_invalid_member(self):
+ """
+ Raise member validator error if an invalid member is found.
+ """
+ member_validator = instance_of(int)
+ v = deep_iterable(member_validator)
+ a = simple_attr("test")
+ with pytest.raises(TypeError):
+ v(None, a, [42, "42"])
+
+ def test_fail_invalid_iterable(self):
+ """
+ Raise iterable validator error if an invalid iterable is found.
+ """
+ member_validator = instance_of(int)
+ iterable_validator = instance_of(tuple)
+ v = deep_iterable(member_validator, iterable_validator)
+ a = simple_attr("test")
+ with pytest.raises(TypeError):
+ v(None, a, [42])
+
+ def test_fail_invalid_member_and_iterable(self):
+ """
+ Raise iterable validator error if both the iterable
+ and a member are invalid.
+ """
+ member_validator = instance_of(int)
+ iterable_validator = instance_of(tuple)
+ v = deep_iterable(member_validator, iterable_validator)
+ a = simple_attr("test")
+ with pytest.raises(TypeError):
+ v(None, a, [42, "42"])
+
+ def test_repr_member_only(self):
+ """
+ Returned validator has a useful `__repr__`
+ when only member validator is set.
+ """
+ member_validator = instance_of(int)
+ member_repr = "<instance_of validator for type <{type} 'int'>>".format(
+ type=TYPE
+ )
+ v = deep_iterable(member_validator)
+ expected_repr = (
+ "<deep_iterable validator for iterables of {member_repr}>"
+ ).format(member_repr=member_repr)
+ assert expected_repr == repr(v)
+
+ def test_repr_member_and_iterable(self):
+ """
+ Returned validator has a useful `__repr__` when both member
+ and iterable validators are set.
+ """
+ member_validator = instance_of(int)
+ member_repr = "<instance_of validator for type <{type} 'int'>>".format(
+ type=TYPE
+ )
+ iterable_validator = instance_of(list)
+ iterable_repr = (
+ "<instance_of validator for type <{type} 'list'>>"
+ ).format(type=TYPE)
+ v = deep_iterable(member_validator, iterable_validator)
+ expected_repr = (
+ "<deep_iterable validator for"
+ " {iterable_repr} iterables of {member_repr}>"
+ ).format(iterable_repr=iterable_repr, member_repr=member_repr)
+ assert expected_repr == repr(v)
+
+
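+# A minimal usage sketch: `deep_iterable` composes a member validator with an
+# optional validator for the iterable itself; `_IntListSketch` is a made-up
+# name for illustration.
+@attr.s
+class _IntListSketch(object):
+ values = attr.ib(
+ validator=deep_iterable(
+ member_validator=instance_of(int),
+ iterable_validator=instance_of(list),
+ )
+ )
+
+
+_IntListSketch([1, 2, 3])  # accepted
+with pytest.raises(TypeError):
+ _IntListSketch((1, 2, 3))  # rejected: a tuple fails the iterable validator
+
+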
+class TestDeepMapping(object):
+ """
+ Tests for `deep_mapping`.
+ """
+
+ def test_in_all(self):
+ """
+ Verify that this validator is in ``__all__``.
+ """
+ assert deep_mapping.__name__ in validator_module.__all__
+
+ def test_success(self):
+ """
+ If both the key and value validators succeed, nothing happens.
+ """
+ key_validator = instance_of(str)
+ value_validator = instance_of(int)
+ v = deep_mapping(key_validator, value_validator)
+ a = simple_attr("test")
+ v(None, a, {"a": 6, "b": 7})
+
+ @pytest.mark.parametrize(
+ "key_validator, value_validator, mapping_validator",
+ (
+ (42, instance_of(int), None),
+ (instance_of(str), 42, None),
+ (instance_of(str), instance_of(int), 42),
+ (42, 42, None),
+ (42, 42, 42),
+ ),
+ )
+ def test_noncallable_validators(
+ self, key_validator, value_validator, mapping_validator
+ ):
+ """
+ Raise `TypeError` if any validators are not callable.
+ """
+ with pytest.raises(TypeError) as e:
+ deep_mapping(key_validator, value_validator, mapping_validator)
+
+ value = 42
+ message = "must be callable (got {value} that is a {type_}).".format(
+ value=value, type_=value.__class__
+ )
+
+ assert message in e.value.args[0]
+ assert value == e.value.args[1]
+ assert message in e.value.msg
+ assert value == e.value.value
+
+ def test_fail_invalid_mapping(self):
+ """
+ Raise `TypeError` if mapping validator fails.
+ """
+ key_validator = instance_of(str)
+ value_validator = instance_of(int)
+ mapping_validator = instance_of(dict)
+ v = deep_mapping(key_validator, value_validator, mapping_validator)
+ a = simple_attr("test")
+ with pytest.raises(TypeError):
+ v(None, a, None)
+
+ def test_fail_invalid_key(self):
+ """
+ Raise key validator error if an invalid key is found.
+ """
+ key_validator = instance_of(str)
+ value_validator = instance_of(int)
+ v = deep_mapping(key_validator, value_validator)
+ a = simple_attr("test")
+ with pytest.raises(TypeError):
+ v(None, a, {"a": 6, 42: 7})
+
+ def test_fail_invalid_member(self):
+ """
+ Raise value validator error if an invalid member value is found.
+ """
+ key_validator = instance_of(str)
+ value_validator = instance_of(int)
+ v = deep_mapping(key_validator, value_validator)
+ a = simple_attr("test")
+ with pytest.raises(TypeError):
+ v(None, a, {"a": "6", "b": 7})
+
+ def test_repr(self):
+ """
+ Returned validator has a useful `__repr__`.
+ """
+ key_validator = instance_of(str)
+ key_repr = "<instance_of validator for type <{type} 'str'>>".format(
+ type=TYPE
+ )
+ value_validator = instance_of(int)
+ value_repr = "<instance_of validator for type <{type} 'int'>>".format(
+ type=TYPE
+ )
+ v = deep_mapping(key_validator, value_validator)
+ expected_repr = (
+ "<deep_mapping validator for objects mapping "
+ "{key_repr} to {value_repr}>"
+ ).format(key_repr=key_repr, value_repr=value_repr)
+ assert expected_repr == repr(v)
+
+
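+# A minimal usage sketch: `deep_mapping` validates keys and values separately,
+# with an optional validator for the mapping itself; `_ScoresSketch` is a
+# made-up name for illustration.
+@attr.s
+class _ScoresSketch(object):
+ scores = attr.ib(
+ validator=deep_mapping(
+ key_validator=instance_of(str),
+ value_validator=instance_of(int),
+ )
+ )
+
+
+_ScoresSketch({"alice": 3})  # accepted
+with pytest.raises(TypeError):
+ _ScoresSketch({"alice": "3"})  # rejected: the value fails instance_of(int)
+
+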
+class TestIsCallable(object):
+ """
+ Tests for `is_callable`.
+ """
+
+ def test_in_all(self):
+ """
+ Verify that this validator is in ``__all__``.
+ """
+ assert is_callable.__name__ in validator_module.__all__
+
+ def test_success(self):
+ """
+ If the value is callable, nothing happens.
+ """
+ v = is_callable()
+ a = simple_attr("test")
+ v(None, a, isinstance)
+
+ def test_fail(self):
+ """
+ Raise TypeError if the value is not callable.
+ """
+ v = is_callable()
+ a = simple_attr("test")
+ with pytest.raises(TypeError) as e:
+ v(None, a, None)
+
+ value = None
+ message = "'test' must be callable (got {value} that is a {type_})."
+ expected_message = message.format(value=value, type_=value.__class__)
+
+ assert expected_message == e.value.args[0]
+ assert value == e.value.args[1]
+ assert expected_message == e.value.msg
+ assert value == e.value.value
+
+ def test_repr(self):
+ """
+ Returned validator has a useful `__repr__`.
+ """
+ v = is_callable()
+ assert "<is_callable validator>" == repr(v)
+
+ def test_exception_repr(self):
+ """
+ Verify that NotCallableError exception has a useful `__str__`.
+ """
+ from attr.exceptions import NotCallableError
+
+ instance = NotCallableError(msg="Some Message", value=42)
+ assert "Some Message" == str(instance)
+
+
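+# A minimal usage sketch: `is_callable` is commonly used for hook or callback
+# attributes; `_HookSketch` is a made-up name for illustration.
+@attr.s
+class _HookSketch(object):
+ hook = attr.ib(validator=is_callable())
+
+
+_HookSketch(isinstance)  # accepted
+with pytest.raises(TypeError):
+ _HookSketch("not callable")  # rejected with NotCallableError, a TypeError
+
+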
+def test_hashability():
+ """
+ Validator classes are hashable.
+ """
+ for obj_name in dir(validator_module):
+ obj = getattr(validator_module, obj_name)
+ if not has(obj):
+ continue
+ hash_func = getattr(obj, "__hash__", None)
+ assert hash_func is not None
+ assert hash_func is not object.__hash__
+
+
+class TestLtLeGeGt:
+ """
+ Tests for `lt`, `le`, `ge` and `gt`.
+ """
+
+ BOUND = 4
+
+ def test_in_all(self):
+ """
+ All four comparison validators are in ``__all__``.
+ """
+ assert all(
+ f.__name__ in validator_module.__all__ for f in [lt, le, ge, gt]
+ )
+
+ @pytest.mark.parametrize("v", [lt, le, ge, gt])
+ def test_retrieve_bound(self, v):
+ """
+ The configured bound for the comparison can be extracted from the
+ Attribute.
+ """
+
+ @attr.s
+ class Tester(object):
+ value = attr.ib(validator=v(self.BOUND))
+
+ assert fields(Tester).value.validator.bound == self.BOUND
+
+ @pytest.mark.parametrize(
+ "v, value",
+ [
+ (lt, 3),
+ (le, 3),
+ (le, 4),
+ (ge, 4),
+ (ge, 5),
+ (gt, 5),
+ ],
+ )
+ def test_check_valid(self, v, value):
+ """Silent if value {op} bound."""
+
+ @attr.s
+ class Tester(object):
+ value = attr.ib(validator=v(self.BOUND))
+
+ Tester(value) # shouldn't raise exceptions
+
+ @pytest.mark.parametrize(
+ "v, value",
+ [
+ (lt, 4),
+ (le, 5),
+ (ge, 3),
+ (gt, 4),
+ ],
+ )
+ def test_check_invalid(self, v, value):
+ """Raise ValueError if value {op} bound."""
+
+ @attr.s
+ class Tester(object):
+ value = attr.ib(validator=v(self.BOUND))
+
+ with pytest.raises(ValueError):
+ Tester(value)
+
+ @pytest.mark.parametrize("v", [lt, le, ge, gt])
+ def test_repr(self, v):
+ """
+ __repr__ is meaningful.
+ """
+ nv = v(23)
+ assert repr(nv) == "<Validator for x {op} {bound}>".format(
+ op=nv.compare_op, bound=23
+ )
+
+
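+# A minimal usage sketch: the bound validators compose naturally, e.g. to
+# constrain a value to a closed range; `_PercentSketch` is a made-up name.
+@attr.s
+class _PercentSketch(object):
+ value = attr.ib(validator=[ge(0), le(100)])
+
+
+_PercentSketch(50)  # accepted
+with pytest.raises(ValueError):
+ _PercentSketch(101)  # rejected: violates le(100)
+
+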
+class TestMaxLen:
+ """
+ Tests for `max_len`.
+ """
+
+ MAX_LENGTH = 4
+
+ def test_in_all(self):
+ """
+ validator is in ``__all__``.
+ """
+ assert max_len.__name__ in validator_module.__all__
+
+ def test_retrieve_max_len(self):
+ """
+ The configured maximum length can be extracted from the Attribute.
+ """
+
+ @attr.s
+ class Tester(object):
+ value = attr.ib(validator=max_len(self.MAX_LENGTH))
+
+ assert fields(Tester).value.validator.max_length == self.MAX_LENGTH
+
+ @pytest.mark.parametrize(
+ "value",
+ [
+ "",
+ "foo",
+ "spam",
+ [],
+ list(range(MAX_LENGTH)),
+ {"spam": 3, "eggs": 4},
+ ],
+ )
+ def test_check_valid(self, value):
+ """
+ Silent if len(value) <= max_len.
+ Values can be strings and other iterables.
+ """
+
+ @attr.s
+ class Tester(object):
+ value = attr.ib(validator=max_len(self.MAX_LENGTH))
+
+ Tester(value) # shouldn't raise exceptions
+
+ @pytest.mark.parametrize(
+ "value",
+ [
+ "bacon",
+ list(range(6)),
+ ],
+ )
+ def test_check_invalid(self, value):
+ """
+ Raise ValueError if len(value) > max_len.
+ """
+
+ @attr.s
+ class Tester(object):
+ value = attr.ib(validator=max_len(self.MAX_LENGTH))
+
+ with pytest.raises(ValueError):
+ Tester(value)
+
+ def test_repr(self):
+ """
+ __repr__ is meaningful.
+ """
+ assert repr(max_len(23)) == "<max_len validator for 23>"
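+
+
+# A minimal usage sketch: `max_len` works on anything with a `len()`, such as
+# strings and lists; `_ShortNameSketch` is a made-up name for illustration.
+@attr.s
+class _ShortNameSketch(object):
+ name = attr.ib(validator=max_len(4))
+
+
+_ShortNameSketch("spam")  # accepted: length 4 is within the limit
+with pytest.raises(ValueError):
+ _ShortNameSketch("bacon")  # rejected: length 5 exceeds max_len(4)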
diff --git a/testing/web-platform/tests/tools/third_party/attrs/tests/test_version_info.py b/testing/web-platform/tests/tools/third_party/attrs/tests/test_version_info.py
new file mode 100644
index 0000000000..41f75f47a6
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/tests/test_version_info.py
@@ -0,0 +1,62 @@
+# SPDX-License-Identifier: MIT
+
+from __future__ import absolute_import, division, print_function
+
+import pytest
+
+from attr import VersionInfo
+from attr._compat import PY2
+
+
+@pytest.fixture(name="vi")
+def fixture_vi():
+ return VersionInfo(19, 2, 0, "final")
+
+
+class TestVersionInfo:
+ def test_from_string_no_releaselevel(self, vi):
+ """
+ If there is no suffix, the releaselevel becomes "final" by default.
+ """
+ assert vi == VersionInfo._from_version_string("19.2.0")
+
+ def test_suffix_is_preserved(self):
+ """
+ If there is a suffix, it's preserved.
+ """
+ assert (
+ "dev0"
+ == VersionInfo._from_version_string("19.2.0.dev0").releaselevel
+ )
+
+ @pytest.mark.skipif(
+ PY2, reason="Python 2 is too YOLO to care about comparability."
+ )
+ @pytest.mark.parametrize("other", [(), (19, 2, 0, "final", "garbage")])
+ def test_wrong_len(self, vi, other):
+ """
+ Comparing with a tuple that has the wrong length raises an error.
+ """
+ assert vi != other
+
+ with pytest.raises(TypeError):
+ vi < other
+
+ @pytest.mark.parametrize("other", [[19, 2, 0, "final"]])
+ def test_wrong_type(self, vi, other):
+ """
+ Only compare to other VersionInfos or tuples.
+ """
+ assert vi != other
+
+ def test_order(self, vi):
+ """
+ Ordering works as expected.
+ """
+ assert vi < (20,)
+ assert vi < (19, 2, 1)
+ assert vi > (0,)
+ assert vi <= (19, 2)
+ assert vi >= (19, 2)
+ assert vi > (19, 2, 0, "dev0")
+ assert vi < (19, 2, 0, "post1")
diff --git a/testing/web-platform/tests/tools/third_party/attrs/tests/typing_example.py b/testing/web-platform/tests/tools/third_party/attrs/tests/typing_example.py
new file mode 100644
index 0000000000..a85c768c10
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/tests/typing_example.py
@@ -0,0 +1,420 @@
+# SPDX-License-Identifier: MIT
+
+import re
+
+from typing import Any, Dict, List, Tuple, Union
+
+import attr
+import attrs
+
+
+# Typing via "type" Argument ---
+
+
+@attr.s
+class C:
+ a = attr.ib(type=int)
+
+
+c = C(1)
+C(a=1)
+
+
+@attr.s
+class D:
+ x = attr.ib(type=List[int])
+
+
+@attr.s
+class E:
+ y = attr.ib(type="List[int]")
+
+
+@attr.s
+class F:
+ z = attr.ib(type=Any)
+
+
+# Typing via Annotations ---
+
+
+@attr.s
+class CC:
+ a: int = attr.ib()
+
+
+cc = CC(1)
+CC(a=1)
+
+
+@attr.s
+class DD:
+ x: List[int] = attr.ib()
+
+
+@attr.s
+class EE:
+ y: "List[int]" = attr.ib()
+
+
+@attr.s
+class FF:
+ z: Any = attr.ib()
+
+
+@attrs.define
+class FFF:
+ z: int
+
+
+FFF(1)
+
+
+# Inheritance --
+
+
+@attr.s
+class GG(DD):
+ y: str = attr.ib()
+
+
+GG(x=[1], y="foo")
+
+
+@attr.s
+class HH(DD, EE):
+ z: float = attr.ib()
+
+
+HH(x=[1], y=[], z=1.1)
+
+
+# same class
+c == cc
+
+
+# Exceptions
+@attr.s(auto_exc=True)
+class Error(Exception):
+ x: int = attr.ib()
+
+
+try:
+ raise Error(1)
+except Error as e:
+ e.x
+ e.args
+ str(e)
+
+
+@attrs.define
+class Error2(Exception):
+ x: int
+
+
+try:
+ raise Error2(1)
+except Error2 as e:
+ e.x
+ e.args
+ str(e)
+
+
+# Converters
+# XXX: Currently converters can only be functions so none of this works
+# although the stubs should be correct.
+
+# @attr.s
+# class ConvCOptional:
+# x: Optional[int] = attr.ib(converter=attr.converters.optional(int))
+
+
+# ConvCOptional(1)
+# ConvCOptional(None)
+
+
+# @attr.s
+# class ConvCDefaultIfNone:
+# x: int = attr.ib(converter=attr.converters.default_if_none(42))
+
+
+# ConvCDefaultIfNone(1)
+# ConvCDefaultIfNone(None)
+
+
+# @attr.s
+# class ConvCToBool:
+# x: int = attr.ib(converter=attr.converters.to_bool)
+
+
+# ConvCToBool(1)
+# ConvCToBool(True)
+# ConvCToBool("on")
+# ConvCToBool("yes")
+# ConvCToBool(0)
+# ConvCToBool(False)
+# ConvCToBool("n")
+
+
+# Validators
+@attr.s
+class Validated:
+ a = attr.ib(
+ type=List[C],
+ validator=attr.validators.deep_iterable(
+ attr.validators.instance_of(C), attr.validators.instance_of(list)
+ ),
+ )
+ aa = attr.ib(
+ type=Tuple[C],
+ validator=attr.validators.deep_iterable(
+ attr.validators.instance_of(C), attr.validators.instance_of(tuple)
+ ),
+ )
+ b = attr.ib(
+ type=List[C],
+ validator=attr.validators.deep_iterable(
+ attr.validators.instance_of(C)
+ ),
+ )
+ c = attr.ib(
+ type=Dict[C, D],
+ validator=attr.validators.deep_mapping(
+ attr.validators.instance_of(C),
+ attr.validators.instance_of(D),
+ attr.validators.instance_of(dict),
+ ),
+ )
+ d = attr.ib(
+ type=Dict[C, D],
+ validator=attr.validators.deep_mapping(
+ attr.validators.instance_of(C), attr.validators.instance_of(D)
+ ),
+ )
+ e: str = attr.ib(validator=attr.validators.matches_re(re.compile(r"foo")))
+ f: str = attr.ib(
+ validator=attr.validators.matches_re(r"foo", flags=42, func=re.search)
+ )
+
+ # Test different forms of instance_of
+ g: int = attr.ib(validator=attr.validators.instance_of(int))
+ h: int = attr.ib(validator=attr.validators.instance_of((int,)))
+ j: Union[int, str] = attr.ib(
+ validator=attr.validators.instance_of((int, str))
+ )
+ k: Union[int, str, C] = attr.ib(
+ validator=attrs.validators.instance_of((int, C, str))
+ )
+
+
+@attr.define
+class Validated2:
+ num: int = attr.field(validator=attr.validators.ge(0))
+
+
+@attrs.define
+class Validated3:
+ num: int = attr.field(validator=attr.validators.ge(0))
+
+
+with attr.validators.disabled():
+ Validated2(num=-1)
+
+with attrs.validators.disabled():
+ Validated3(num=-1)
+
+try:
+ attr.validators.set_disabled(True)
+ Validated2(num=-1)
+finally:
+ attr.validators.set_disabled(False)
+
+
+# Custom repr()
+@attr.s
+class WithCustomRepr:
+ a: int = attr.ib(repr=True)
+ b: str = attr.ib(repr=False)
+ c: str = attr.ib(repr=lambda value: "c is for cookie")
+ d: bool = attr.ib(repr=str)
+
+
+@attrs.define
+class WithCustomRepr2:
+ a: int = attrs.field(repr=True)
+ b: str = attrs.field(repr=False)
+ c: str = attrs.field(repr=lambda value: "c is for cookie")
+ d: bool = attrs.field(repr=str)
+
+
+# Check some of our own types
+@attr.s(eq=True, order=False)
+class OrderFlags:
+ a: int = attr.ib(eq=False, order=False)
+ b: int = attr.ib(eq=True, order=True)
+
+
+# on_setattr hooks
+@attr.s(on_setattr=attr.setters.validate)
+class ValidatedSetter:
+ a: int
+ b: str = attr.ib(on_setattr=attr.setters.NO_OP)
+ c: bool = attr.ib(on_setattr=attr.setters.frozen)
+ d: int = attr.ib(on_setattr=[attr.setters.convert, attr.setters.validate])
+ e: bool = attr.ib(
+ on_setattr=attr.setters.pipe(
+ attr.setters.convert, attr.setters.validate
+ )
+ )
+
+
+@attrs.define(on_setattr=attr.setters.validate)
+class ValidatedSetter2:
+ a: int
+ b: str = attrs.field(on_setattr=attrs.setters.NO_OP)
+ c: bool = attrs.field(on_setattr=attrs.setters.frozen)
+ d: int = attrs.field(
+ on_setattr=[attrs.setters.convert, attrs.setters.validate]
+ )
+ e: bool = attrs.field(
+ on_setattr=attrs.setters.pipe(
+ attrs.setters.convert, attrs.setters.validate
+ )
+ )
+
+
+# field_transformer
+def ft_hook(cls: type, attribs: List[attr.Attribute]) -> List[attr.Attribute]:
+ return attribs
+
+
+# field_transformer
+def ft_hook2(
+ cls: type, attribs: List[attrs.Attribute]
+) -> List[attrs.Attribute]:
+ return attribs
+
+
+@attr.s(field_transformer=ft_hook)
+class TransformedAttrs:
+ x: int
+
+
+@attrs.define(field_transformer=ft_hook2)
+class TransformedAttrs2:
+ x: int
+
+
+# Auto-detect
+@attr.s(auto_detect=True)
+class AutoDetect:
+ x: int
+
+ def __init__(self, x: int):
+ self.x = x
+
+
+# Provisional APIs
+@attr.define(order=True)
+class NGClass:
+ x: int = attr.field(default=42)
+
+
+ngc = NGClass(1)
+
+
+@attr.mutable(slots=False)
+class NGClass2:
+ x: int
+
+
+ngc2 = NGClass2(1)
+
+
+@attr.frozen(str=True)
+class NGFrozen:
+ x: int
+
+
+ngf = NGFrozen(1)
+
+attr.fields(NGFrozen).x.evolve(eq=False)
+a = attr.fields(NGFrozen).x
+a.evolve(repr=False)
+
+
+attrs.fields(NGFrozen).x.evolve(eq=False)
+a = attrs.fields(NGFrozen).x
+a.evolve(repr=False)
+
+
+@attr.s(collect_by_mro=True)
+class MRO:
+ pass
+
+
+@attr.s
+class FactoryTest:
+ a: List[int] = attr.ib(default=attr.Factory(list))
+ b: List[Any] = attr.ib(default=attr.Factory(list, False))
+ c: List[int] = attr.ib(default=attr.Factory((lambda s: s.a), True))
+
+
+@attrs.define
+class FactoryTest2:
+ a: List[int] = attrs.field(default=attrs.Factory(list))
+ b: List[Any] = attrs.field(default=attrs.Factory(list, False))
+ c: List[int] = attrs.field(default=attrs.Factory((lambda s: s.a), True))
+
+
+attrs.asdict(FactoryTest2())
+attr.asdict(FactoryTest(), tuple_keys=True)
+
+
+# Check match_args stub
+@attr.s(match_args=False)
+class MatchArgs:
+ a: int = attr.ib()
+ b: int = attr.ib()
+
+
+attr.asdict(FactoryTest())
+attr.asdict(FactoryTest(), retain_collection_types=False)
+
+
+# Check match_args stub
+@attrs.define(match_args=False)
+class MatchArgs2:
+ a: int
+ b: int
+
+
+# NG versions of asdict/astuple
+attrs.asdict(MatchArgs2(1, 2))
+attrs.astuple(MatchArgs2(1, 2))
+
+
+def importing_from_attr() -> None:
+ """
+ Use a function to keep the ns clean.
+ """
+ from attr.converters import optional
+ from attr.exceptions import FrozenError
+ from attr.filters import include
+ from attr.setters import frozen
+ from attr.validators import and_
+
+ assert optional and FrozenError and include and frozen and and_
+
+
+def importing_from_attrs() -> None:
+ """
+ Use a function to keep the ns clean.
+ """
+ from attrs.converters import optional
+ from attrs.exceptions import FrozenError
+ from attrs.filters import include
+ from attrs.setters import frozen
+ from attrs.validators import and_
+
+ assert optional and FrozenError and include and frozen and and_
diff --git a/testing/web-platform/tests/tools/third_party/attrs/tests/utils.py b/testing/web-platform/tests/tools/third_party/attrs/tests/utils.py
new file mode 100644
index 0000000000..a2fefbd606
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/tests/utils.py
@@ -0,0 +1,86 @@
+# SPDX-License-Identifier: MIT
+
+"""
+Common helper functions for tests.
+"""
+
+from __future__ import absolute_import, division, print_function
+
+from attr import Attribute
+from attr._make import NOTHING, make_class
+
+
+def simple_class(
+ eq=False,
+ order=False,
+ repr=False,
+ hash=False,
+ str=False,
+ slots=False,
+ frozen=False,
+ cache_hash=False,
+):
+ """
+ Return a new simple class.
+ """
+ return make_class(
+ "C",
+ ["a", "b"],
+ eq=eq or order,
+ order=order,
+ repr=repr,
+ hash=hash,
+ init=True,
+ slots=slots,
+ str=str,
+ frozen=frozen,
+ cache_hash=cache_hash,
+ )
+
+
+def simple_attr(
+ name,
+ default=NOTHING,
+ validator=None,
+ repr=True,
+ eq=True,
+ hash=None,
+ init=True,
+ converter=None,
+ kw_only=False,
+ inherited=False,
+):
+ """
+ Return an attribute with a name and no other bells and whistles.
+ """
+ return Attribute(
+ name=name,
+ default=default,
+ validator=validator,
+ repr=repr,
+ cmp=None,
+ eq=eq,
+ hash=hash,
+ init=init,
+ converter=converter,
+ kw_only=kw_only,
+ inherited=inherited,
+ )
+
+
+class TestSimpleClass(object):
+ """
+ Tests for the testing helper function `simple_class`.
+ """
+
+ def test_returns_class(self):
+ """
+ Returns a class object.
+ """
+ assert type is simple_class().__class__
+
+ def test_returns_distinct_classes(self):
+ """
+ Each call returns a completely new class.
+ """
+ assert simple_class() is not simple_class()
diff --git a/testing/web-platform/tests/tools/third_party/attrs/tox.ini b/testing/web-platform/tests/tools/third_party/attrs/tox.ini
new file mode 100644
index 0000000000..ddcbc4dbbc
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/tox.ini
@@ -0,0 +1,129 @@
+[pytest]
+addopts = -ra
+testpaths = tests
+xfail_strict = true
+filterwarnings =
+ once::Warning
+ ignore:::pympler[.*]
+
+
+# Keep docs in sync with docs env and .readthedocs.yml.
+[gh-actions]
+python =
+ 2.7: py27
+ 3.5: py35
+ 3.6: py36
+ 3.7: py37
+ 3.8: py38, changelog
+ 3.9: py39, pyright
+ 3.10: py310, manifest, typing, docs
+ pypy-2: pypy
+ pypy-3: pypy3
+
+
+[tox]
+envlist = typing,pre-commit,py27,py35,py36,py37,py38,py39,py310,pypy,pypy3,pyright,manifest,docs,pypi-description,changelog,coverage-report
+isolated_build = True
+
+
+[testenv:docs]
+# Keep basepython in sync with gh-actions and .readthedocs.yml.
+basepython = python3.10
+extras = docs
+commands =
+ sphinx-build -n -T -W -b html -d {envtmpdir}/doctrees docs docs/_build/html
+ sphinx-build -n -T -W -b doctest -d {envtmpdir}/doctrees docs docs/_build/html
+ python -m doctest README.rst
+
+
+[testenv]
+extras = tests
+commands = python -m pytest {posargs}
+
+
+[testenv:py27]
+extras = tests
+commands = coverage run -m pytest {posargs}
+
+
+[testenv:py37]
+extras = tests
+commands = coverage run -m pytest {posargs}
+
+
+[testenv:py310]
+# Python 3.6+ has a number of compile-time warnings on invalid string escapes.
+# PYTHONWARNINGS=d and --no-compile below make them visible during the Tox run.
+basepython = python3.10
+install_command = pip install --no-compile {opts} {packages}
+setenv =
+ PYTHONWARNINGS=d
+extras = tests
+commands = coverage run -m pytest {posargs}
+
+
+[testenv:coverage-report]
+basepython = python3.10
+depends = py27,py37,py310
+skip_install = true
+deps = coverage[toml]>=5.4
+commands =
+ coverage combine
+ coverage report
+
+
+[testenv:pre-commit]
+basepython = python3.10
+skip_install = true
+deps =
+ pre-commit
+passenv = HOMEPATH # needed on Windows
+commands =
+ pre-commit run --all-files
+
+
+[testenv:manifest]
+basepython = python3.10
+deps = check-manifest
+skip_install = true
+commands = check-manifest
+
+
+[testenv:pypi-description]
+basepython = python3.8
+skip_install = true
+deps =
+ twine
+ pip >= 18.0.0
+commands =
+ pip wheel -w {envtmpdir}/build --no-deps .
+ twine check {envtmpdir}/build/*
+
+
+[testenv:changelog]
+basepython = python3.8
+deps = towncrier<21.3
+skip_install = true
+commands = towncrier --draft
+
+
+[testenv:typing]
+basepython = python3.10
+deps = mypy>=0.902
+commands =
+ mypy src/attr/__init__.pyi src/attr/_version_info.pyi src/attr/converters.pyi src/attr/exceptions.pyi src/attr/filters.pyi src/attr/setters.pyi src/attr/validators.pyi
+ mypy tests/typing_example.py
+
+
+[testenv:pyright]
+# Install and configure node and pyright
+# This *could* be folded into a custom install_command
+# Use nodeenv to configure node in the running tox virtual environment
+# Seeing errors using "nodeenv -p"
+# Use npm install -g to install "globally" into the virtual environment
+basepython = python3.9
+deps = nodeenv
+commands =
+ nodeenv --prebuilt --node=lts --force {envdir}
+ npm install -g --no-package-lock --no-save pyright
+ pytest tests/test_pyright.py -vv