Diffstat
-rw-r--r--  third_party/python/pip_tools/pip_tools-5.5.0.dist-info/LICENSE | 26
-rw-r--r--  third_party/python/pip_tools/pip_tools-5.5.0.dist-info/METADATA | 535
-rw-r--r--  third_party/python/pip_tools/pip_tools-5.5.0.dist-info/RECORD | 28
-rw-r--r--  third_party/python/pip_tools/pip_tools-5.5.0.dist-info/WHEEL | 6
-rw-r--r--  third_party/python/pip_tools/pip_tools-5.5.0.dist-info/entry_points.txt | 4
-rw-r--r--  third_party/python/pip_tools/pip_tools-5.5.0.dist-info/top_level.txt | 1
-rw-r--r--  third_party/python/pip_tools/piptools/__init__.py | 11
-rw-r--r--  third_party/python/pip_tools/piptools/__main__.py | 17
-rw-r--r--  third_party/python/pip_tools/piptools/_compat/__init__.py | 26
-rw-r--r--  third_party/python/pip_tools/piptools/_compat/contextlib.py | 18
-rw-r--r--  third_party/python/pip_tools/piptools/_compat/pip_compat.py | 18
-rw-r--r--  third_party/python/pip_tools/piptools/_compat/tempfile.py | 88
-rw-r--r--  third_party/python/pip_tools/piptools/cache.py | 173
-rw-r--r--  third_party/python/pip_tools/piptools/click.py | 6
-rw-r--r--  third_party/python/pip_tools/piptools/exceptions.py | 66
-rw-r--r--  third_party/python/pip_tools/piptools/locations.py | 25
-rw-r--r--  third_party/python/pip_tools/piptools/logging.py | 62
-rw-r--r--  third_party/python/pip_tools/piptools/repositories/__init__.py | 3
-rw-r--r--  third_party/python/pip_tools/piptools/repositories/base.py | 57
-rw-r--r--  third_party/python/pip_tools/piptools/repositories/local.py | 97
-rw-r--r--  third_party/python/pip_tools/piptools/repositories/pypi.py | 531
-rw-r--r--  third_party/python/pip_tools/piptools/resolver.py | 405
-rw-r--r--  third_party/python/pip_tools/piptools/scripts/__init__.py | 0
-rw-r--r--  third_party/python/pip_tools/piptools/scripts/compile.py | 495
-rw-r--r--  third_party/python/pip_tools/piptools/scripts/sync.py | 214
-rw-r--r--  third_party/python/pip_tools/piptools/sync.py | 216
-rw-r--r--  third_party/python/pip_tools/piptools/utils.py | 384
-rw-r--r--  third_party/python/pip_tools/piptools/writer.py | 243
28 files changed, 3755 insertions, 0 deletions
diff --git a/third_party/python/pip_tools/pip_tools-5.5.0.dist-info/LICENSE b/third_party/python/pip_tools/pip_tools-5.5.0.dist-info/LICENSE
new file mode 100644
index 0000000000..64719ca9f5
--- /dev/null
+++ b/third_party/python/pip_tools/pip_tools-5.5.0.dist-info/LICENSE
@@ -0,0 +1,26 @@
+Copyright (c). All rights reserved.
+
+Redistribution and use in source and binary forms, with or without modification,
+are permitted provided that the following conditions are met:
+
+ 1. Redistributions of source code must retain the above copyright notice,
+ this list of conditions and the following disclaimer.
+
+ 2. Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+
+ 3. Neither the name of pip-tools nor the names of its contributors may be
+ used to endorse or promote products derived from this software without
+ specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
+ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/third_party/python/pip_tools/pip_tools-5.5.0.dist-info/METADATA b/third_party/python/pip_tools/pip_tools-5.5.0.dist-info/METADATA
new file mode 100644
index 0000000000..48f18ca73c
--- /dev/null
+++ b/third_party/python/pip_tools/pip_tools-5.5.0.dist-info/METADATA
@@ -0,0 +1,535 @@
+Metadata-Version: 2.1
+Name: pip-tools
+Version: 5.5.0
+Summary: pip-tools keeps your pinned dependencies fresh.
+Home-page: https://github.com/jazzband/pip-tools/
+Author: Vincent Driessen
+Author-email: me@nvie.com
+License: BSD
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: Intended Audience :: System Administrators
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Topic :: System :: Systems Administration
+Requires-Python: !=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7
+Requires-Dist: click (>=7)
+Requires-Dist: pip (>=20.1)
+Provides-Extra: coverage
+Requires-Dist: pytest-cov ; extra == 'coverage'
+Provides-Extra: testing
+Requires-Dist: mock ; extra == 'testing'
+Requires-Dist: pytest ; extra == 'testing'
+Requires-Dist: pytest-rerunfailures ; extra == 'testing'
+
+|jazzband| |pypi| |pyversions| |buildstatus-gha| |codecov|
+
+==================================
+pip-tools = pip-compile + pip-sync
+==================================
+
+A set of command line tools to help you keep your ``pip``-based packages fresh,
+even when you've pinned them. `You do pin them, right?`_ (In building your Python application and its dependencies for production, you want to make sure that your builds are predictable and deterministic.)
+
+.. image:: https://github.com/jazzband/pip-tools/raw/master/img/pip-tools-overview.png
+ :alt: pip-tools overview for phase II
+
+.. |buildstatus-gha| image:: https://github.com/jazzband/pip-tools/workflows/CI/badge.svg
+ :alt: GitHub Actions build status
+ :target: https://github.com/jazzband/pip-tools/actions?query=workflow%3ACI
+.. |codecov| image:: https://codecov.io/gh/jazzband/pip-tools/branch/master/graph/badge.svg
+ :alt: Coverage
+ :target: https://codecov.io/gh/jazzband/pip-tools
+.. |jazzband| image:: https://jazzband.co/static/img/badge.svg
+ :alt: Jazzband
+ :target: https://jazzband.co/
+.. |pypi| image:: https://img.shields.io/pypi/v/pip-tools.svg
+ :alt: PyPI version
+ :target: https://pypi.org/project/pip-tools/
+.. |pyversions| image:: https://img.shields.io/pypi/pyversions/pip-tools.svg
+ :alt: Supported Python versions
+ :target: https://pypi.org/project/pip-tools/
+.. _You do pin them, right?: http://nvie.com/posts/pin-your-packages/
+
+
+Installation
+============
+
+Similar to ``pip``, ``pip-tools`` must be installed in each of your project's
+`virtual environments`_:
+
+.. code-block:: bash
+
+ $ source /path/to/venv/bin/activate
+ (venv)$ python -m pip install pip-tools
+
+**Note**: all of the remaining example commands assume you've activated your
+project's virtual environment.
+
+.. _virtual environments: https://packaging.python.org/tutorials/installing-packages/#creating-virtual-environments
+
+Example usage for ``pip-compile``
+=================================
+
+The ``pip-compile`` command lets you compile a ``requirements.txt`` file from
+your dependencies, specified in either ``setup.py`` or ``requirements.in``.
+
+Run it with ``pip-compile`` or ``python -m piptools compile``. If you use
+multiple Python versions, you can run ``pip-compile`` as ``py -X.Y -m piptools
+compile`` on Windows and ``pythonX.Y -m piptools compile`` on other systems.
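+
+For example, all of the following invocations are equivalent (the Python
+version shown is illustrative):
+
+.. code-block:: bash
+
+    $ pip-compile requirements.in                    # console script
+    $ python -m piptools compile requirements.in     # module invocation
+    $ py -3.8 -m piptools compile requirements.in    # Windows, Python 3.8
+    $ python3.8 -m piptools compile requirements.in  # other systems, Python 3.8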
+
+``pip-compile`` should be run from the same virtual environment as your
+project so conditional dependencies that require a specific Python version,
+or other environment markers, resolve relative to your project's
+environment.
+
+**Note**: ensure you don't have an existing ``requirements.txt`` if you
+compile ``setup.py`` or ``requirements.in`` from scratch; otherwise, it might
+interfere.
+
+Requirements from ``setup.py``
+------------------------------
+
+Suppose you have a Django project, and want to pin it for production.
+If you have a ``setup.py`` with ``install_requires=['django']``, then run
+``pip-compile`` without any arguments:
+
+.. code-block:: bash
+
+ $ pip-compile
+ #
+ # This file is autogenerated by pip-compile
+ # To update, run:
+ #
+ # pip-compile
+ #
+ asgiref==3.2.3
+ # via django
+ django==3.0.3
+ # via my_django_project (setup.py)
+ pytz==2019.3
+ # via django
+ sqlparse==0.3.0
+ # via django
+
+``pip-compile`` will produce your ``requirements.txt``, with all the Django
+dependencies (and all underlying dependencies) pinned.
+
+Without ``setup.py``
+--------------------
+
+If you don't use ``setup.py`` (`it's easy to write one`_), you can create a
+``requirements.in`` file to declare the Django dependency:
+
+.. code-block:: ini
+
+ # requirements.in
+ django
+
+Now, run ``pip-compile requirements.in``:
+
+.. code-block:: bash
+
+ $ pip-compile requirements.in
+ #
+ # This file is autogenerated by pip-compile
+ # To update, run:
+ #
+ # pip-compile requirements.in
+ #
+ asgiref==3.2.3
+ # via django
+ django==3.0.3
+ # via -r requirements.in
+ pytz==2019.3
+ # via django
+ sqlparse==0.3.0
+ # via django
+
+And it will produce your ``requirements.txt``, with all the Django dependencies
+(and all underlying dependencies) pinned.
+
+.. _it's easy to write one: https://packaging.python.org/guides/distributing-packages-using-setuptools/#configuring-your-project
+
+Using hashes
+------------
+
+If you would like to use *Hash-Checking Mode*, available in ``pip`` since
+version 8.0, ``pip-compile`` offers the ``--generate-hashes`` flag:
+
+.. code-block:: bash
+
+ $ pip-compile --generate-hashes requirements.in
+ #
+ # This file is autogenerated by pip-compile
+ # To update, run:
+ #
+ # pip-compile --generate-hashes requirements.in
+ #
+ asgiref==3.2.3 \
+ --hash=sha256:7e06d934a7718bf3975acbf87780ba678957b87c7adc056f13b6215d610695a0 \
+ --hash=sha256:ea448f92fc35a0ef4b1508f53a04c4670255a3f33d22a81c8fc9c872036adbe5 \
+ # via django
+ django==3.0.3 \
+ --hash=sha256:2f1ba1db8648484dd5c238fb62504777b7ad090c81c5f1fd8d5eb5ec21b5f283 \
+ --hash=sha256:c91c91a7ad6ef67a874a4f76f58ba534f9208412692a840e1d125eb5c279cb0a \
+ # via -r requirements.in
+ pytz==2019.3 \
+ --hash=sha256:1c557d7d0e871de1f5ccd5833f60fb2550652da6be2693c1e02300743d21500d \
+ --hash=sha256:b02c06db6cf09c12dd25137e563b31700d3b80fcc4ad23abb7a315f2789819be \
+ # via django
+ sqlparse==0.3.0 \
+ --hash=sha256:40afe6b8d4b1117e7dff5504d7a8ce07d9a1b15aeeade8a2d10f130a834f8177 \
+ --hash=sha256:7c3dca29c022744e95b547e867cee89f4fce4373f3549ccd8797d8eb52cdb873 \
+ # via django
+
+Updating requirements
+---------------------
+
+To update all packages, periodically re-run ``pip-compile --upgrade``.
+
+To update a specific package to the latest or to a specific version, use the
+``--upgrade-package`` or ``-P`` flag:
+
+.. code-block:: bash
+
+ # only update the django package
+ $ pip-compile --upgrade-package django
+
+ # update both the django and requests packages
+ $ pip-compile --upgrade-package django --upgrade-package requests
+
+ # update the django package to the latest, and requests to v2.0.0
+ $ pip-compile --upgrade-package django --upgrade-package requests==2.0.0
+
+You can combine ``--upgrade`` and ``--upgrade-package`` in one command to
+constrain the allowed upgrades. For example, to upgrade all packages while
+constraining ``requests`` to the latest version less than 3.0:
+
+.. code-block:: bash
+
+ $ pip-compile --upgrade --upgrade-package 'requests<3.0'
+
+Output File
+-----------
+
+To output the pinned requirements to a file other than
+``requirements.txt``, use ``--output-file``. This might be useful for compiling
+multiple files, for example with different constraints on django to test a
+library with both versions using `tox <https://tox.readthedocs.io/en/latest/>`__:
+
+.. code-block:: bash
+
+ $ pip-compile --upgrade-package 'django<1.0' --output-file requirements-django0x.txt
+ $ pip-compile --upgrade-package 'django<2.0' --output-file requirements-django1x.txt
+
+Or to output to standard output, use ``--output-file=-``:
+
+.. code-block:: bash
+
+ $ pip-compile --output-file=- > requirements.txt
+ $ pip-compile - --output-file=- < requirements.in > requirements.txt
+
+Forwarding options to ``pip``
+-----------------------------
+
+Any valid ``pip`` flags or arguments may be passed on with ``pip-compile``'s
+``--pip-args`` option, e.g.
+
+.. code-block:: bash
+
+ $ pip-compile requirements.in --pip-args '--retries 10 --timeout 30'
+
+Configuration
+-------------
+
+You might be wrapping the ``pip-compile`` command in another script. To avoid
+confusing consumers of your custom script, you can override the update command
+generated at the top of requirements files by setting the
+``CUSTOM_COMPILE_COMMAND`` environment variable.
+
+.. code-block:: bash
+
+ $ CUSTOM_COMPILE_COMMAND="./pipcompilewrapper" pip-compile requirements.in
+ #
+ # This file is autogenerated by pip-compile
+ # To update, run:
+ #
+ # ./pipcompilewrapper
+ #
+ asgiref==3.2.3
+ # via django
+ django==3.0.3
+ # via -r requirements.in
+ pytz==2019.3
+ # via django
+ sqlparse==0.3.0
+ # via django
+
+Workflow for layered requirements
+---------------------------------
+
+If you have different environments that you need to install different but
+compatible packages for, then you can create layered requirements files and use
+one layer to constrain the other.
+
+For example, if you have a Django project where you want the newest ``2.1``
+release in production and when developing you want to use the Django debug
+toolbar, then you can create two ``*.in`` files, one for each layer:
+
+.. code-block:: ini
+
+ # requirements.in
+ django<2.2
+
+At the top of the development requirements file, ``dev-requirements.in``, use
+``-c requirements.txt`` to constrain the dev requirements to packages already
+selected for production in ``requirements.txt``:
+
+.. code-block:: ini
+
+ # dev-requirements.in
+ -c requirements.txt
+ django-debug-toolbar
+
+First, compile ``requirements.txt`` as usual:
+
+.. code-block:: bash
+
+ $ pip-compile
+ #
+ # This file is autogenerated by pip-compile
+ # To update, run:
+ #
+ # pip-compile
+ #
+ django==2.1.15
+ # via -r requirements.in
+ pytz==2019.3
+ # via django
+
+
+Now compile the dev requirements; the ``requirements.txt`` file is used as
+a constraint:
+
+.. code-block:: bash
+
+ $ pip-compile dev-requirements.in
+ #
+ # This file is autogenerated by pip-compile
+ # To update, run:
+ #
+ # pip-compile dev-requirements.in
+ #
+ django-debug-toolbar==2.2
+ # via -r dev-requirements.in
+ django==2.1.15
+ # via
+ # -c requirements.txt
+ # django-debug-toolbar
+ pytz==2019.3
+ # via
+ # -c requirements.txt
+ # django
+ sqlparse==0.3.0
+ # via django-debug-toolbar
+
+As you can see above, even though a ``2.2`` release of Django is available, the
+dev requirements only include a ``2.1`` version of Django because they were
+constrained. Now both compiled requirements files can be installed safely in
+the dev environment.
+
+To install requirements in the production environment, use:
+
+.. code-block:: bash
+
+ $ pip-sync
+
+To install requirements in the development environment, use:
+
+.. code-block:: bash
+
+ $ pip-sync requirements.txt dev-requirements.txt
+
+
+Version control integration
+---------------------------
+
+You might use ``pip-compile`` as a hook for `pre-commit <https://github.com/pre-commit/pre-commit>`_.
+See the `pre-commit docs <https://pre-commit.com/>`_ for instructions.
+Sample ``.pre-commit-config.yaml``:
+
+.. code-block:: yaml
+
+ repos:
+ - repo: https://github.com/jazzband/pip-tools
+ rev: 5.0.0
+ hooks:
+ - id: pip-compile
+
+You might want to customize ``pip-compile`` args by configuring ``args`` and/or ``files``, for example:
+
+.. code-block:: yaml
+
+ repos:
+ - repo: https://github.com/jazzband/pip-tools
+ rev: 5.0.0
+ hooks:
+ - id: pip-compile
+ files: ^requirements/production\.(in|txt)$
+ args: [--index-url=https://example.com, requirements/production.in]
+
+
+Example usage for ``pip-sync``
+==============================
+
+Now that you have a ``requirements.txt``, you can use ``pip-sync`` to update
+your virtual environment to reflect exactly what's in there. This will
+install/upgrade/uninstall everything necessary to match the
+``requirements.txt`` contents.
+
+Run it with ``pip-sync`` or ``python -m piptools sync``. If you use multiple
+Python versions, you can also run ``py -X.Y -m piptools sync`` on Windows and
+``pythonX.Y -m piptools sync`` on other systems.
+
+``pip-sync`` must be installed into and run from the same virtual
+environment as your project to identify which packages to install
+or upgrade.
+
+**Be careful**: ``pip-sync`` is meant to be used only with a
+``requirements.txt`` generated by ``pip-compile``.
+
+.. code-block:: bash
+
+ $ pip-sync
+ Uninstalling flake8-2.4.1:
+ Successfully uninstalled flake8-2.4.1
+ Collecting click==4.1
+ Downloading click-4.1-py2.py3-none-any.whl (62kB)
+ 100% |................................| 65kB 1.8MB/s
+ Found existing installation: click 4.0
+ Uninstalling click-4.0:
+ Successfully uninstalled click-4.0
+ Successfully installed click-4.1
+
+To sync multiple ``*.txt`` dependency lists, just pass them in via command
+line arguments, e.g.
+
+.. code-block:: bash
+
+ $ pip-sync dev-requirements.txt requirements.txt
+
+If no files are given, ``pip-sync`` defaults to ``requirements.txt``.
+
+Any valid ``pip install`` flags or arguments may be passed with ``pip-sync``'s
+``--pip-args`` option, e.g.
+
+.. code-block:: bash
+
+ $ pip-sync requirements.txt --pip-args '--no-cache-dir --no-deps'
+
+**Note**: ``pip-sync`` will not upgrade or uninstall packaging tools like
+``setuptools``, ``pip``, or ``pip-tools`` itself. Use ``python -m pip install --upgrade``
+to upgrade those packages.
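+
+For example (a sketch; substitute whichever of those packages you actually
+want to upgrade):
+
+.. code-block:: bash
+
+    $ python -m pip install --upgrade pip setuptools pip-tools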
+
+Should I commit ``requirements.in`` and ``requirements.txt`` to source control?
+===============================================================================
+
+Generally, yes. If you want a reproducible environment installation available from your source control,
+you should commit both ``requirements.in`` and ``requirements.txt`` to source control.
+
+Note that if you are deploying on multiple Python environments (read the section below),
+then you must commit a separate output file for each Python environment.
+We suggest using the ``{env}-requirements.txt`` format
+(ex: ``win32-py2.7-requirements.txt``, ``macos-py3.6-requirements.txt``, etc.).
+
+
+Cross-environment usage of ``requirements.in``/``requirements.txt`` and ``pip-compile``
+=======================================================================================
+
+The dependencies of a package can change depending on the Python environment in which it
+is installed. Here, we define a Python environment as the combination of Operating
+System, Python version (2.7, 3.6, etc.), and Python implementation (CPython, PyPy,
+etc.). For an exact definition, refer to the possible combinations of `PEP 508
+environment markers`_.
+
+As the resulting ``requirements.txt`` can differ for each environment, users must
+execute ``pip-compile`` **on each Python environment separately** to generate a
+``requirements.txt`` valid for each said environment. The same ``requirements.in`` can
+be used as the source file for all environments, using `PEP 508 environment markers`_ as
+needed, the same way it would be done for regular ``pip`` cross-environment usage.
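+
+For example, a hypothetical ``requirements.in`` that uses an environment marker
+to pull in the ``dataclasses`` backport only where it is needed:
+
+.. code-block:: ini
+
+    # requirements.in
+    dataclasses; python_version < "3.7"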
+
+If the generated ``requirements.txt`` remains exactly the same for all Python
+environments, then it can be used across Python environments safely. **But** users
+should be careful, as any package update can introduce environment-dependent
+dependencies, making any newly generated ``requirements.txt`` environment-dependent too.
+As a general rule, users should still execute ``pip-compile``
+on each targeted Python environment to avoid issues.
+
+.. _PEP 508 environment markers: https://www.python.org/dev/peps/pep-0508/#environment-markers
+
+Other useful tools
+==================
+
+- `pipdeptree`_ to print the dependency tree of the installed packages.
+- ``requirements.in``/``requirements.txt`` syntax highlighting:
+
+ * `requirements.txt.vim`_ for Vim.
+ * `Python extension for VS Code`_ for VS Code.
+ * `pip-requirements.el`_ for Emacs.
+
+.. _pipdeptree: https://github.com/naiquevin/pipdeptree
+.. _requirements.txt.vim: https://github.com/raimon49/requirements.txt.vim
+.. _Python extension for VS Code: https://marketplace.visualstudio.com/items?itemName=ms-python.python
+.. _pip-requirements.el: https://github.com/Wilfred/pip-requirements.el
+
+
+Deprecations
+============
+
+This section lists ``pip-tools`` features that are currently deprecated.
+
+- The ``--index/--no-index`` command-line options are deprecated; use
+  ``--emit-index-url/--no-emit-index-url`` instead (since 5.2.0).
+
+- In future versions, the ``--allow-unsafe`` behavior will be enabled by
+  default. Use ``--no-allow-unsafe`` to keep the old behavior. It is
+  recommended to pass ``--allow-unsafe`` now to adapt to the upcoming
+  change, as shown below.
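+
+For example, to opt in to the new behavior now:
+
+.. code-block:: bash
+
+    $ pip-compile --allow-unsafe requirements.in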
+
+Versions and compatibility
+==========================
+
+The table below summarizes the latest ``pip-tools`` versions with the required
+``pip`` and Python versions. Generally, ``pip-tools`` supports the same Python
+versions as the required ``pip`` versions.
+
++---------------+----------------+----------------+
+| pip-tools | pip | Python |
++===============+================+================+
+| 4.5.* | 8.1.3 - 20.0.2 | 2.7, 3.5 - 3.8 |
++---------------+----------------+----------------+
+| 5.0.0 - 5.3.0 | 20.0 - 20.1.1 | 2.7, 3.5 - 3.8 |
++---------------+----------------+----------------+
+| 5.4.0 | 20.1 - 20.3.* | 2.7, 3.5 - 3.8 |
++---------------+----------------+----------------+
+| >= 5.5.0 | 20.1 - 20.3.* | 2.7, 3.5 - 3.9 |
++---------------+----------------+----------------+
+
+
diff --git a/third_party/python/pip_tools/pip_tools-5.5.0.dist-info/RECORD b/third_party/python/pip_tools/pip_tools-5.5.0.dist-info/RECORD
new file mode 100644
index 0000000000..35941ede11
--- /dev/null
+++ b/third_party/python/pip_tools/pip_tools-5.5.0.dist-info/RECORD
@@ -0,0 +1,28 @@
+piptools/__init__.py,sha256=8thn-dOvLOIMk8yl333hqovhg76G31sLazxfZ01E3NQ,379
+piptools/__main__.py,sha256=AyR6tAMb4vvaRLpmGIOAraTVt-XRdy3J3A2TcU_D_tM,267
+piptools/cache.py,sha256=ChWcRnWmG7TDhdTTNI4Hq_hAj5emFBXzT56cMh3sMJY,5694
+piptools/click.py,sha256=6G25l-dJ7Bx7KqI9EOr86rHfN_r-1x_Ums1uMY7BM-8,128
+piptools/exceptions.py,sha256=IxtKvKycggxF6ilRRS-dusqIYa8fQp0czqiZ-OSqoP4,2074
+piptools/locations.py,sha256=tQNE273jbYQN2jSIbPO171VAdT44aWHCIz-sl352Zxg,776
+piptools/logging.py,sha256=CCQlSuo5NbjxusFef8tM3IGkS2upYU88zFnSlYEXCAk,1492
+piptools/resolver.py,sha256=WjIl95AMr3nCcS9Tit07EpohSnhzkUUPBb9cmlbT9sY,16172
+piptools/sync.py,sha256=co7EGsnrE6pZ6Sco5vZgprvZ2m-rXM6zwX188bNSN9o,7048
+piptools/utils.py,sha256=gVvsb5OqI4DHvSU5XMrIppIjtBAnGimFGBnRVWyLjzg,11679
+piptools/writer.py,sha256=-2D7aqLHe64DOJ9c5iOhEYTKLtrIe2pFmsDTUgtpsNo,8205
+piptools/_compat/__init__.py,sha256=3T3-yQL4Wjej3xmRq716nnOwOXOsaVfNYl4BXjegPaA,606
+piptools/_compat/contextlib.py,sha256=4K3xvf7Ujd_KfUVkXRVLxvyQ2L36072t02Vifc7c6TU,602
+piptools/_compat/pip_compat.py,sha256=RfjQn6ezQqHOLpo1uMka_pI9MYN55OHnrdy3XaKKkTk,701
+piptools/_compat/tempfile.py,sha256=m5d22eGPL2vFwUi0kp33_Tr9UQg1QBQnQ1LJ04HlAzI,2862
+piptools/repositories/__init__.py,sha256=AuVddJd2QqAuTgmmF4JEm1JZ8O7BS9oFMnRmzVOSBxc,95
+piptools/repositories/base.py,sha256=YQdb5XUo-24fjUpS8PXMCWMQY57KGV8byonRr3ndgiE,1887
+piptools/repositories/local.py,sha256=cPRdAGCNFHguAuk2k47e7_XUas9Z_Ct-rbLd9WY59YI,3230
+piptools/repositories/pypi.py,sha256=FEehUjtegCl4A0UGfEvVaMnx6XoaX6YzExlG4WAlkCg,20182
+piptools/scripts/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+piptools/scripts/compile.py,sha256=wrMT0IDmlfK9Q2BY6zwiDoWMEaGond86NEMIclKPcBc,15385
+piptools/scripts/sync.py,sha256=bySckxKFNPfWxbZtFA5vkgfAHjAS7Sqdhi1DuG2PVq4,6383
+pip_tools-5.5.0.dist-info/LICENSE,sha256=3a_52MBuKjuNIlTVi9c-oEzjQZfYDMMrCXlIXgZmX5o,1500
+pip_tools-5.5.0.dist-info/METADATA,sha256=s5H4BeCtxh9SskntX3JnSJP8IuPycf2780T0DMpBdPY,18712
+pip_tools-5.5.0.dist-info/WHEEL,sha256=Z-nyYpwrcSqxfdux5Mbn_DQ525iP7J2DG3JgGvOYyTQ,110
+pip_tools-5.5.0.dist-info/entry_points.txt,sha256=kxgyoCOYYMl9wcS2pOl6ruVZTy1h9Sbk8SUJ1gifJ8s,99
+pip_tools-5.5.0.dist-info/top_level.txt,sha256=_16nvfow-EENN1JQ_WjY7bFroJvb5IYH5xLIHc3kd7U,9
+pip_tools-5.5.0.dist-info/RECORD,,
diff --git a/third_party/python/pip_tools/pip_tools-5.5.0.dist-info/WHEEL b/third_party/python/pip_tools/pip_tools-5.5.0.dist-info/WHEEL
new file mode 100644
index 0000000000..01b8fc7d4a
--- /dev/null
+++ b/third_party/python/pip_tools/pip_tools-5.5.0.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.36.2)
+Root-Is-Purelib: true
+Tag: py2-none-any
+Tag: py3-none-any
+
diff --git a/third_party/python/pip_tools/pip_tools-5.5.0.dist-info/entry_points.txt b/third_party/python/pip_tools/pip_tools-5.5.0.dist-info/entry_points.txt
new file mode 100644
index 0000000000..383fce0bf0
--- /dev/null
+++ b/third_party/python/pip_tools/pip_tools-5.5.0.dist-info/entry_points.txt
@@ -0,0 +1,4 @@
+[console_scripts]
+pip-compile = piptools.scripts.compile:cli
+pip-sync = piptools.scripts.sync:cli
+
diff --git a/third_party/python/pip_tools/pip_tools-5.5.0.dist-info/top_level.txt b/third_party/python/pip_tools/pip_tools-5.5.0.dist-info/top_level.txt
new file mode 100644
index 0000000000..9f14c19c7f
--- /dev/null
+++ b/third_party/python/pip_tools/pip_tools-5.5.0.dist-info/top_level.txt
@@ -0,0 +1 @@
+piptools
diff --git a/third_party/python/pip_tools/piptools/__init__.py b/third_party/python/pip_tools/piptools/__init__.py
new file mode 100644
index 0000000000..9f0c95aa56
--- /dev/null
+++ b/third_party/python/pip_tools/piptools/__init__.py
@@ -0,0 +1,11 @@
+import locale
+
+from piptools.click import secho
+
+# Needed for locale.getpreferredencoding(False) to work
+# in pip._internal.utils.encoding.auto_decode
+try:
+ locale.setlocale(locale.LC_ALL, "")
+except locale.Error as e: # pragma: no cover
+    # setlocale can apparently crash if locales are uninitialized
+ secho("Ignoring error when setting locale: {}".format(e), fg="red")
diff --git a/third_party/python/pip_tools/piptools/__main__.py b/third_party/python/pip_tools/piptools/__main__.py
new file mode 100644
index 0000000000..2d8b75e85d
--- /dev/null
+++ b/third_party/python/pip_tools/piptools/__main__.py
@@ -0,0 +1,17 @@
+import click
+
+from piptools.scripts import compile, sync
+
+
+@click.group()
+def cli():
+ pass
+
+
+cli.add_command(compile.cli, "compile")
+cli.add_command(sync.cli, "sync")
+
+
+# Enable ``python -m piptools ...``.
+if __name__ == "__main__": # pragma: no branch
+ cli()
diff --git a/third_party/python/pip_tools/piptools/_compat/__init__.py b/third_party/python/pip_tools/piptools/_compat/__init__.py
new file mode 100644
index 0000000000..de28628db2
--- /dev/null
+++ b/third_party/python/pip_tools/piptools/_compat/__init__.py
@@ -0,0 +1,26 @@
+# coding: utf-8
+# flake8: noqa
+from __future__ import absolute_import, division, print_function, unicode_literals
+
+import errno
+import os
+
+from pip._vendor import six
+
+from .pip_compat import PIP_VERSION, parse_requirements
+
+if six.PY2:
+ from .tempfile import TemporaryDirectory
+else:
+ from tempfile import TemporaryDirectory
+
+
+def makedirs(name, mode=0o777, exist_ok=False):
+ if six.PY2:
+ try:
+ os.makedirs(name, mode)
+ except OSError as e:
+ if not exist_ok or e.errno != errno.EEXIST:
+ raise
+ else:
+ os.makedirs(name, mode, exist_ok)
diff --git a/third_party/python/pip_tools/piptools/_compat/contextlib.py b/third_party/python/pip_tools/piptools/_compat/contextlib.py
new file mode 100644
index 0000000000..04039ccb01
--- /dev/null
+++ b/third_party/python/pip_tools/piptools/_compat/contextlib.py
@@ -0,0 +1,18 @@
+# Ported from python 3.7 contextlib.py
+class nullcontext(object):
+ """Context manager that does no additional processing.
+ Used as a stand-in for a normal context manager, when a particular
+ block of code is only sometimes used with a normal context manager:
+ cm = optional_cm if condition else nullcontext()
+ with cm:
+ # Perform operation, using optional_cm if condition is True
+ """
+
+ def __init__(self, enter_result=None):
+ self.enter_result = enter_result
+
+ def __enter__(self):
+ return self.enter_result
+
+ def __exit__(self, *excinfo):
+ pass
diff --git a/third_party/python/pip_tools/piptools/_compat/pip_compat.py b/third_party/python/pip_tools/piptools/_compat/pip_compat.py
new file mode 100644
index 0000000000..6cd24a0ff9
--- /dev/null
+++ b/third_party/python/pip_tools/piptools/_compat/pip_compat.py
@@ -0,0 +1,18 @@
+# -*- coding=utf-8 -*-
+from __future__ import absolute_import
+
+import pip
+from pip._internal.req import parse_requirements as _parse_requirements
+from pip._internal.req.constructors import install_req_from_parsed_requirement
+from pip._vendor.packaging.version import parse as parse_version
+
+PIP_VERSION = tuple(map(int, parse_version(pip.__version__).base_version.split(".")))
+
+
+def parse_requirements(
+ filename, session, finder=None, options=None, constraint=False, isolated=False
+):
+ for parsed_req in _parse_requirements(
+ filename, session, finder=finder, options=options, constraint=constraint
+ ):
+ yield install_req_from_parsed_requirement(parsed_req, isolated=isolated)
diff --git a/third_party/python/pip_tools/piptools/_compat/tempfile.py b/third_party/python/pip_tools/piptools/_compat/tempfile.py
new file mode 100644
index 0000000000..dc7e9ef997
--- /dev/null
+++ b/third_party/python/pip_tools/piptools/_compat/tempfile.py
@@ -0,0 +1,88 @@
+# coding: utf-8
+from __future__ import absolute_import, division, print_function
+
+import os as _os
+import sys as _sys
+import warnings as _warnings
+from tempfile import mkdtemp
+
+
+class TemporaryDirectory(object):
+ """Create and return a temporary directory. This has the same
+ behavior as mkdtemp but can be used as a context manager. For
+ example:
+
+ with TemporaryDirectory() as tmpdir:
+ ...
+
+ Upon exiting the context, the directory and everything contained
+ in it are removed.
+ """
+
+ def __init__(self, suffix="", prefix="tmp", dir=None):
+ self._closed = False
+ self.name = None # Handle mkdtemp raising an exception
+ self.name = mkdtemp(suffix, prefix, dir)
+
+ def __repr__(self):
+ return "<{} {!r}>".format(self.__class__.__name__, self.name)
+
+ def __enter__(self):
+ return self.name
+
+ def cleanup(self):
+ if self.name and not self._closed:
+ try:
+ self._rmtree(self.name)
+ except (TypeError, AttributeError) as ex:
+ # Issue #10188: Emit a warning on stderr
+ # if the directory could not be cleaned
+ # up due to missing globals
+ if "None" not in str(ex):
+ raise
+ print(
+ "ERROR: {!r} while cleaning up {!r}".format(ex, self),
+ file=_sys.stderr,
+ )
+ return
+ self._closed = True
+
+ def __exit__(self, exc, value, tb):
+ self.cleanup()
+
+ def __del__(self):
+ # Issue a ResourceWarning if implicit cleanup needed
+ self.cleanup()
+
+ # XXX (ncoghlan): The following code attempts to make
+ # this class tolerant of the module nulling out process
+ # that happens during CPython interpreter shutdown
+ # Alas, it doesn't actually manage it. See issue #10188
+ _listdir = staticmethod(_os.listdir)
+ _path_join = staticmethod(_os.path.join)
+ _isdir = staticmethod(_os.path.isdir)
+ _islink = staticmethod(_os.path.islink)
+ _remove = staticmethod(_os.remove)
+ _rmdir = staticmethod(_os.rmdir)
+ _warn = _warnings.warn
+
+ def _rmtree(self, path):
+ # Essentially a stripped down version of shutil.rmtree. We can't
+ # use globals because they may be None'ed out at shutdown.
+ for name in self._listdir(path):
+ fullname = self._path_join(path, name)
+ try:
+ isdir = self._isdir(fullname) and not self._islink(fullname)
+ except OSError:
+ isdir = False
+ if isdir:
+ self._rmtree(fullname)
+ else:
+ try:
+ self._remove(fullname)
+ except OSError:
+ pass
+ try:
+ self._rmdir(path)
+ except OSError:
+ pass
diff --git a/third_party/python/pip_tools/piptools/cache.py b/third_party/python/pip_tools/piptools/cache.py
new file mode 100644
index 0000000000..301d38bd52
--- /dev/null
+++ b/third_party/python/pip_tools/piptools/cache.py
@@ -0,0 +1,173 @@
+# coding: utf-8
+from __future__ import absolute_import, division, print_function, unicode_literals
+
+import errno
+import json
+import os
+import platform
+import sys
+
+from pip._vendor.packaging.requirements import Requirement
+
+from ._compat import makedirs
+from .exceptions import PipToolsError
+from .utils import as_tuple, key_from_req, lookup_table
+
+_PEP425_PY_TAGS = {"cpython": "cp", "pypy": "pp", "ironpython": "ip", "jython": "jy"}
+
+
+def _implementation_name():
+    """Similar to PEP 425, but the minor version is separated from the
+    major to differentiate "3.10" and "31.0".
+ """
+ implementation_name = platform.python_implementation().lower()
+ implementation = _PEP425_PY_TAGS.get(implementation_name, "??")
+ return "{}{}.{}".format(implementation, *sys.version_info)
+
+
+class CorruptCacheError(PipToolsError):
+ def __init__(self, path):
+ self.path = path
+
+ def __str__(self):
+ lines = [
+ "The dependency cache seems to have been corrupted.",
+ "Inspect, or delete, the following file:",
+ " {}".format(self.path),
+ ]
+ return os.linesep.join(lines)
+
+
+def read_cache_file(cache_file_path):
+ with open(cache_file_path, "r") as cache_file:
+ try:
+ doc = json.load(cache_file)
+ except ValueError:
+ raise CorruptCacheError(cache_file_path)
+
+ # Check version and load the contents
+ if doc["__format__"] != 1:
+ raise ValueError("Unknown cache file format")
+ return doc["dependencies"]
+
+
+class DependencyCache(object):
+ """
+ Creates a new persistent dependency cache for the current Python version.
+ The cache file is written to the appropriate user cache dir for the
+ current platform, i.e.
+
+ ~/.cache/pip-tools/depcache-pyX.Y.json
+
+    where "py" indicates the Python implementation and "X.Y" the Python version.
+ """
+
+ def __init__(self, cache_dir):
+ makedirs(cache_dir, exist_ok=True)
+ cache_filename = "depcache-{}.json".format(_implementation_name())
+
+ self._cache_file = os.path.join(cache_dir, cache_filename)
+ self._cache = None
+
+ @property
+ def cache(self):
+ """
+ The dictionary that is the actual in-memory cache. This property
+ lazily loads the cache from disk.
+ """
+ if self._cache is None:
+ self.read_cache()
+ return self._cache
+
+ def as_cache_key(self, ireq):
+ """
+ Given a requirement, return its cache key. This behavior is a little weird
+ in order to allow backwards compatibility with cache files. For a requirement
+ without extras, this will return, for example:
+
+ ("ipython", "2.1.0")
+
+ For a requirement with extras, the extras will be comma-separated and appended
+ to the version, inside brackets, like so:
+
+ ("ipython", "2.1.0[nbconvert,notebook]")
+ """
+ name, version, extras = as_tuple(ireq)
+ if not extras:
+ extras_string = ""
+ else:
+ extras_string = "[{}]".format(",".join(extras))
+ return name, "{}{}".format(version, extras_string)
+
+ def read_cache(self):
+ """Reads the cached contents into memory."""
+ try:
+ self._cache = read_cache_file(self._cache_file)
+ except IOError as e:
+ if e.errno != errno.ENOENT:
+ raise
+ self._cache = {}
+
+ def write_cache(self):
+ """Writes the cache to disk as JSON."""
+ doc = {"__format__": 1, "dependencies": self._cache}
+ with open(self._cache_file, "w") as f:
+ json.dump(doc, f, sort_keys=True)
+
+ def clear(self):
+ self._cache = {}
+ self.write_cache()
+
+ def __contains__(self, ireq):
+ pkgname, pkgversion_and_extras = self.as_cache_key(ireq)
+ return pkgversion_and_extras in self.cache.get(pkgname, {})
+
+ def __getitem__(self, ireq):
+ pkgname, pkgversion_and_extras = self.as_cache_key(ireq)
+ return self.cache[pkgname][pkgversion_and_extras]
+
+ def __setitem__(self, ireq, values):
+ pkgname, pkgversion_and_extras = self.as_cache_key(ireq)
+ self.cache.setdefault(pkgname, {})
+ self.cache[pkgname][pkgversion_and_extras] = values
+ self.write_cache()
+
+ def reverse_dependencies(self, ireqs):
+ """
+ Returns a lookup table of reverse dependencies for all the given ireqs.
+
+ Since this is all static, it only works if the dependency cache
+ contains the complete data, otherwise you end up with a partial view.
+ This is typically no problem if you use this function after the entire
+ dependency tree is resolved.
+ """
+ ireqs_as_cache_values = [self.as_cache_key(ireq) for ireq in ireqs]
+ return self._reverse_dependencies(ireqs_as_cache_values)
+
+ def _reverse_dependencies(self, cache_keys):
+ """
+ Returns a lookup table of reverse dependencies for all the given cache keys.
+
+ Example input:
+
+ [('pep8', '1.5.7'),
+ ('flake8', '2.4.0'),
+ ('mccabe', '0.3'),
+ ('pyflakes', '0.8.1')]
+
+ Example output:
+
+ {'pep8': ['flake8'],
+ 'flake8': [],
+ 'mccabe': ['flake8'],
+ 'pyflakes': ['flake8']}
+
+ """
+ # First, collect all the dependencies into a sequence of (parent, child)
+ # tuples, like [('flake8', 'pep8'), ('flake8', 'mccabe'), ...]
+ return lookup_table(
+ (key_from_req(Requirement(dep_name)), name)
+ for name, version_and_extras in cache_keys
+ for dep_name in self.cache[name][version_and_extras]
+ )
diff --git a/third_party/python/pip_tools/piptools/click.py b/third_party/python/pip_tools/piptools/click.py
new file mode 100644
index 0000000000..86f1612c6a
--- /dev/null
+++ b/third_party/python/pip_tools/piptools/click.py
@@ -0,0 +1,6 @@
+from __future__ import absolute_import
+
+import click
+from click import * # noqa
+
+click.disable_unicode_literals_warning = True
diff --git a/third_party/python/pip_tools/piptools/exceptions.py b/third_party/python/pip_tools/piptools/exceptions.py
new file mode 100644
index 0000000000..5278972741
--- /dev/null
+++ b/third_party/python/pip_tools/piptools/exceptions.py
@@ -0,0 +1,66 @@
+from pip._internal.utils.misc import redact_auth_from_url
+
+
+class PipToolsError(Exception):
+ pass
+
+
+class NoCandidateFound(PipToolsError):
+ def __init__(self, ireq, candidates_tried, finder):
+ self.ireq = ireq
+ self.candidates_tried = candidates_tried
+ self.finder = finder
+
+ def __str__(self):
+ versions = []
+ pre_versions = []
+
+ for candidate in sorted(self.candidates_tried):
+ version = str(candidate.version)
+ if candidate.version.is_prerelease:
+ pre_versions.append(version)
+ else:
+ versions.append(version)
+
+ lines = ["Could not find a version that matches {}".format(self.ireq)]
+
+ if versions:
+ lines.append("Tried: {}".format(", ".join(versions)))
+
+ if pre_versions:
+ if self.finder.allow_all_prereleases:
+ line = "Tried"
+ else:
+ line = "Skipped"
+
+ line += " pre-versions: {}".format(", ".join(pre_versions))
+ lines.append(line)
+
+ if versions or pre_versions:
+ lines.append(
+ "There are incompatible versions in the resolved dependencies:"
+ )
+ source_ireqs = getattr(self.ireq, "_source_ireqs", [])
+ lines.extend(" {}".format(ireq) for ireq in source_ireqs)
+ else:
+ redacted_urls = tuple(
+ redact_auth_from_url(url) for url in self.finder.index_urls
+ )
+ lines.append("No versions found")
+ lines.append(
+ "{} {} reachable?".format(
+ "Were" if len(redacted_urls) > 1 else "Was",
+ " or ".join(redacted_urls),
+ )
+ )
+ return "\n".join(lines)
+
+
+class IncompatibleRequirements(PipToolsError):
+ def __init__(self, ireq_a, ireq_b):
+ self.ireq_a = ireq_a
+ self.ireq_b = ireq_b
+
+ def __str__(self):
+ message = "Incompatible requirements found: {} and {}"
+ return message.format(self.ireq_a, self.ireq_b)
diff --git a/third_party/python/pip_tools/piptools/locations.py b/third_party/python/pip_tools/piptools/locations.py
new file mode 100644
index 0000000000..9ca0ffe436
--- /dev/null
+++ b/third_party/python/pip_tools/piptools/locations.py
@@ -0,0 +1,25 @@
+import os
+from shutil import rmtree
+
+from pip._internal.utils.appdirs import user_cache_dir
+
+from .click import secho
+
+# The user_cache_dir helper comes straight from pip itself
+CACHE_DIR = user_cache_dir("pip-tools")
+
+# NOTE
+# We used to store the cache dir under ~/.pip-tools, which is not the
+# preferred place to store caches for any platform. This has been addressed
+# in pip-tools==1.0.5, but to be good citizens, we point this out explicitly
+# to the user when this directory is still found.
+LEGACY_CACHE_DIR = os.path.expanduser("~/.pip-tools")
+
+if os.path.exists(LEGACY_CACHE_DIR):
+ secho(
+ "Removing old cache dir {} (new cache dir is {})".format(
+ LEGACY_CACHE_DIR, CACHE_DIR
+ ),
+ fg="yellow",
+ )
+ rmtree(LEGACY_CACHE_DIR)
diff --git a/third_party/python/pip_tools/piptools/logging.py b/third_party/python/pip_tools/piptools/logging.py
new file mode 100644
index 0000000000..dcf068f7a2
--- /dev/null
+++ b/third_party/python/pip_tools/piptools/logging.py
@@ -0,0 +1,62 @@
+# coding: utf-8
+from __future__ import absolute_import, division, print_function, unicode_literals
+
+import contextlib
+import logging
+import sys
+
+from . import click
+
+# Initialise the builtin logging module for other components using it, e.g. pip.
+logging.basicConfig()
+
+
+class LogContext(object):
+ stream = sys.stderr
+
+ def __init__(self, verbosity=0, indent_width=2):
+ self.verbosity = verbosity
+ self.current_indent = 0
+ self._indent_width = indent_width
+
+ def log(self, message, *args, **kwargs):
+ kwargs.setdefault("err", True)
+ prefix = " " * self.current_indent
+ click.secho(prefix + message, *args, **kwargs)
+
+ def debug(self, *args, **kwargs):
+ if self.verbosity >= 1:
+ self.log(*args, **kwargs)
+
+ def info(self, *args, **kwargs):
+ if self.verbosity >= 0:
+ self.log(*args, **kwargs)
+
+ def warning(self, *args, **kwargs):
+ kwargs.setdefault("fg", "yellow")
+ self.log(*args, **kwargs)
+
+ def error(self, *args, **kwargs):
+ kwargs.setdefault("fg", "red")
+ self.log(*args, **kwargs)
+
+ def _indent(self):
+ self.current_indent += self._indent_width
+
+ def _dedent(self):
+ self.current_indent -= self._indent_width
+
+ @contextlib.contextmanager
+ def indentation(self):
+ """
+ Increase indentation.
+ """
+ self._indent()
+ try:
+ yield
+ finally:
+ self._dedent()
+
+
+log = LogContext()
diff --git a/third_party/python/pip_tools/piptools/repositories/__init__.py b/third_party/python/pip_tools/piptools/repositories/__init__.py
new file mode 100644
index 0000000000..ce5142e8c6
--- /dev/null
+++ b/third_party/python/pip_tools/piptools/repositories/__init__.py
@@ -0,0 +1,3 @@
+# flake8: noqa
+from .local import LocalRequirementsRepository
+from .pypi import PyPIRepository
diff --git a/third_party/python/pip_tools/piptools/repositories/base.py b/third_party/python/pip_tools/piptools/repositories/base.py
new file mode 100644
index 0000000000..54849cb7f8
--- /dev/null
+++ b/third_party/python/pip_tools/piptools/repositories/base.py
@@ -0,0 +1,57 @@
+# coding: utf-8
+from __future__ import absolute_import, division, print_function, unicode_literals
+
+from abc import ABCMeta, abstractmethod
+from contextlib import contextmanager
+
+from pip._vendor.six import add_metaclass
+
+
+@add_metaclass(ABCMeta)
+class BaseRepository(object):
+ def clear_caches(self):
+ """Should clear any caches used by the implementation."""
+
+ @abstractmethod
+ @contextmanager
+ def freshen_build_caches(self):
+ """Should start with fresh build/source caches."""
+
+ @abstractmethod
+ def find_best_match(self, ireq):
+ """
+ Return a Version object that indicates the best match for the given
+ InstallRequirement according to the repository.
+ """
+
+ @abstractmethod
+ def get_dependencies(self, ireq):
+ """
+ Given a pinned, URL, or editable InstallRequirement, returns a set of
+ dependencies (also InstallRequirements, but not necessarily pinned).
+ They indicate the secondary dependencies for the given requirement.
+ """
+
+ @abstractmethod
+ def get_hashes(self, ireq):
+ """
+        Given a pinned InstallRequirement, returns a set of hashes that represent
+ all of the files for a given requirement. It is not acceptable for an
+ editable or unpinned requirement to be passed to this function.
+ """
+
+ @abstractmethod
+ @contextmanager
+ def allow_all_wheels(self):
+ """
+ Monkey patches pip.Wheel to allow wheels from all platforms and Python versions.
+ """
+
+ @abstractmethod
+ def copy_ireq_dependencies(self, source, dest):
+ """
+ Notifies the repository that `dest` is a copy of `source`, and so it
+ has the same dependencies. Otherwise, once we prepare an ireq to assign
+ it its name, we would lose track of those dependencies on combining
+ that ireq with others.
+ """
diff --git a/third_party/python/pip_tools/piptools/repositories/local.py b/third_party/python/pip_tools/piptools/repositories/local.py
new file mode 100644
index 0000000000..f185f35c3c
--- /dev/null
+++ b/third_party/python/pip_tools/piptools/repositories/local.py
@@ -0,0 +1,97 @@
+# coding: utf-8
+from __future__ import absolute_import, division, print_function, unicode_literals
+
+from contextlib import contextmanager
+
+from pip._internal.utils.hashes import FAVORITE_HASH
+
+from piptools.utils import as_tuple, key_from_ireq, make_install_requirement
+
+from .base import BaseRepository
+
+
+def ireq_satisfied_by_existing_pin(ireq, existing_pin):
+ """
+ Return True if the given InstallationRequirement is satisfied by the
+    Return True if the given InstallRequirement is satisfied by the
+ """
+ version = next(iter(existing_pin.req.specifier)).version
+ return ireq.req.specifier.contains(
+ version, prereleases=existing_pin.req.specifier.prereleases
+ )
+
+
+class LocalRequirementsRepository(BaseRepository):
+ """
+    The LocalRequirementsRepository proxies the _real_ repository by first
+    checking whether a requirement can be satisfied by existing pins (i.e. the
+    result of a previous compile step).
+
+ In effect, if a requirement can be satisfied with a version pinned in the
+ requirements file, we prefer that version over the best match found in
+ PyPI. This keeps updates to the requirements.txt down to a minimum.
+ """
+
+ def __init__(self, existing_pins, proxied_repository, reuse_hashes=True):
+ self._reuse_hashes = reuse_hashes
+ self.repository = proxied_repository
+ self.existing_pins = existing_pins
+
+ @property
+ def options(self):
+ return self.repository.options
+
+ @property
+ def finder(self):
+ return self.repository.finder
+
+ @property
+ def session(self):
+ return self.repository.session
+
+ @property
+ def DEFAULT_INDEX_URL(self):
+ return self.repository.DEFAULT_INDEX_URL
+
+ def clear_caches(self):
+ self.repository.clear_caches()
+
+ @contextmanager
+ def freshen_build_caches(self):
+ with self.repository.freshen_build_caches():
+ yield
+
+ def find_best_match(self, ireq, prereleases=None):
+ key = key_from_ireq(ireq)
+ existing_pin = self.existing_pins.get(key)
+ if existing_pin and ireq_satisfied_by_existing_pin(ireq, existing_pin):
+ project, version, _ = as_tuple(existing_pin)
+ return make_install_requirement(
+ project, version, ireq.extras, constraint=ireq.constraint
+ )
+ else:
+ return self.repository.find_best_match(ireq, prereleases)
+
+ def get_dependencies(self, ireq):
+ return self.repository.get_dependencies(ireq)
+
+ def get_hashes(self, ireq):
+ existing_pin = self._reuse_hashes and self.existing_pins.get(
+ key_from_ireq(ireq)
+ )
+ if existing_pin and ireq_satisfied_by_existing_pin(ireq, existing_pin):
+ hashes = existing_pin.hash_options
+ hexdigests = hashes.get(FAVORITE_HASH)
+ if hexdigests:
+ return {
+ ":".join([FAVORITE_HASH, hexdigest]) for hexdigest in hexdigests
+ }
+ return self.repository.get_hashes(ireq)
+
+ @contextmanager
+ def allow_all_wheels(self):
+ with self.repository.allow_all_wheels():
+ yield
+
+ def copy_ireq_dependencies(self, source, dest):
+ self.repository.copy_ireq_dependencies(source, dest)
diff --git a/third_party/python/pip_tools/piptools/repositories/pypi.py b/third_party/python/pip_tools/piptools/repositories/pypi.py
new file mode 100644
index 0000000000..7a988bfc1f
--- /dev/null
+++ b/third_party/python/pip_tools/piptools/repositories/pypi.py
@@ -0,0 +1,531 @@
+# coding: utf-8
+from __future__ import absolute_import, division, print_function, unicode_literals
+
+import collections
+import hashlib
+import itertools
+import logging
+import os
+from contextlib import contextmanager
+from shutil import rmtree
+
+from pip._internal.cache import WheelCache
+from pip._internal.cli.progress_bars import BAR_TYPES
+from pip._internal.commands import create_command
+from pip._internal.models.index import PackageIndex, PyPI
+from pip._internal.models.link import Link
+from pip._internal.models.wheel import Wheel
+from pip._internal.req import RequirementSet
+from pip._internal.req.req_tracker import get_requirement_tracker
+from pip._internal.utils.hashes import FAVORITE_HASH
+from pip._internal.utils.logging import indent_log, setup_logging
+from pip._internal.utils.misc import normalize_path
+from pip._internal.utils.temp_dir import TempDirectory, global_tempdir_manager
+from pip._internal.utils.urls import path_to_url, url_to_path
+from pip._vendor.requests import RequestException
+
+from .._compat import PIP_VERSION, TemporaryDirectory, contextlib, makedirs
+from ..click import progressbar
+from ..exceptions import NoCandidateFound
+from ..logging import log
+from ..utils import (
+ as_tuple,
+ fs_str,
+ is_pinned_requirement,
+ is_url_requirement,
+ lookup_table,
+ make_install_requirement,
+)
+from .base import BaseRepository
+
+FILE_CHUNK_SIZE = 4096
+FileStream = collections.namedtuple("FileStream", "stream size")
+
+
+class PyPIRepository(BaseRepository):
+ DEFAULT_INDEX_URL = PyPI.simple_url
+ HASHABLE_PACKAGE_TYPES = {"bdist_wheel", "sdist"}
+
+ """
+    The PyPIRepository will use the provided Finder instance to look up
+ packages. Typically, it looks up packages on PyPI (the default implicit
+ config), but any other PyPI mirror can be used if index_urls is
+ changed/configured on the Finder.
+ """
+
+ def __init__(self, pip_args, cache_dir):
+ # Use pip's parser for pip.conf management and defaults.
+ # General options (find_links, index_url, extra_index_url, trusted_host,
+ # and pre) are deferred to pip.
+ self.command = create_command("install")
+ extra_pip_args = (
+ []
+ if PIP_VERSION[:2] <= (20, 2)
+ else ["--use-deprecated", "legacy-resolver"]
+ )
+ self.options, _ = self.command.parse_args(pip_args + extra_pip_args)
+ if self.options.cache_dir:
+ self.options.cache_dir = normalize_path(self.options.cache_dir)
+
+ self.options.require_hashes = False
+ self.options.ignore_dependencies = False
+
+ self.session = self.command._build_session(self.options)
+ self.finder = self.command._build_package_finder(
+ options=self.options, session=self.session
+ )
+
+ # Caches
+ # stores project_name => InstallationCandidate mappings for all
+ # versions reported by PyPI, so we only have to ask once for each
+ # project
+ self._available_candidates_cache = {}
+
+ # stores InstallRequirement => list(InstallRequirement) mappings
+ # of all secondary dependencies for the given requirement, so we
+ # only have to go to disk once for each requirement
+ self._dependencies_cache = {}
+
+ # Setup file paths
+ self._build_dir = None
+ self._source_dir = None
+ self._cache_dir = normalize_path(cache_dir)
+ self._download_dir = fs_str(os.path.join(self._cache_dir, "pkgs"))
+ if PIP_VERSION[:2] <= (20, 2):
+ self._wheel_download_dir = fs_str(os.path.join(self._cache_dir, "wheels"))
+
+ self._setup_logging()
+
+ @contextmanager
+ def freshen_build_caches(self):
+ """
+ Start with fresh build/source caches. Will remove any old build
+ caches from disk automatically.
+ """
+ self._build_dir = TemporaryDirectory(fs_str("build"))
+ self._source_dir = TemporaryDirectory(fs_str("source"))
+ try:
+ yield
+ finally:
+ self._build_dir.cleanup()
+ self._build_dir = None
+ self._source_dir.cleanup()
+ self._source_dir = None
+
+ @property
+ def build_dir(self):
+ return self._build_dir.name if self._build_dir else None
+
+ @property
+ def source_dir(self):
+ return self._source_dir.name if self._source_dir else None
+
+ def clear_caches(self):
+ rmtree(self._download_dir, ignore_errors=True)
+ if PIP_VERSION[:2] <= (20, 2):
+ rmtree(self._wheel_download_dir, ignore_errors=True)
+
+ def find_all_candidates(self, req_name):
+ if req_name not in self._available_candidates_cache:
+ candidates = self.finder.find_all_candidates(req_name)
+ self._available_candidates_cache[req_name] = candidates
+ return self._available_candidates_cache[req_name]
+
+ def find_best_match(self, ireq, prereleases=None):
+ """
+ Returns a Version object that indicates the best match for the given
+ InstallRequirement according to the external repository.
+ """
+ if ireq.editable or is_url_requirement(ireq):
+ return ireq # return itself as the best match
+
+ all_candidates = self.find_all_candidates(ireq.name)
+ candidates_by_version = lookup_table(all_candidates, key=lambda c: c.version)
+ matching_versions = ireq.specifier.filter(
+ (candidate.version for candidate in all_candidates), prereleases=prereleases
+ )
+
+ matching_candidates = list(
+ itertools.chain.from_iterable(
+ candidates_by_version[ver] for ver in matching_versions
+ )
+ )
+ if not matching_candidates:
+ raise NoCandidateFound(ireq, all_candidates, self.finder)
+
+ evaluator = self.finder.make_candidate_evaluator(ireq.name)
+ best_candidate_result = evaluator.compute_best_candidate(matching_candidates)
+ best_candidate = best_candidate_result.best_candidate
+
+ # Turn the candidate into a pinned InstallRequirement
+ return make_install_requirement(
+ best_candidate.name,
+ best_candidate.version,
+ ireq.extras,
+ constraint=ireq.constraint,
+ )
+
+ def resolve_reqs(self, download_dir, ireq, wheel_cache):
+ with get_requirement_tracker() as req_tracker, TempDirectory(
+ kind="resolver"
+ ) as temp_dir, indent_log():
+ preparer_kwargs = dict(
+ temp_build_dir=temp_dir,
+ options=self.options,
+ req_tracker=req_tracker,
+ session=self.session,
+ finder=self.finder,
+ use_user_site=False,
+ download_dir=download_dir,
+ )
+ if PIP_VERSION[:2] <= (20, 2):
+ preparer_kwargs["wheel_download_dir"] = self._wheel_download_dir
+ preparer = self.command.make_requirement_preparer(**preparer_kwargs)
+
+ reqset = RequirementSet()
+ if PIP_VERSION[:2] <= (20, 1):
+ ireq.is_direct = True
+ else:
+ ireq.user_supplied = True
+ reqset.add_requirement(ireq)
+
+ resolver = self.command.make_resolver(
+ preparer=preparer,
+ finder=self.finder,
+ options=self.options,
+ wheel_cache=wheel_cache,
+ use_user_site=False,
+ ignore_installed=True,
+ ignore_requires_python=False,
+ force_reinstall=False,
+ upgrade_strategy="to-satisfy-only",
+ )
+ results = resolver._resolve_one(reqset, ireq)
+ if not ireq.prepared:
+ # If still not prepared, e.g. a constraint, do enough to assign
+ # the ireq a name:
+ if PIP_VERSION[:2] <= (20, 2):
+ resolver._get_abstract_dist_for(ireq)
+ else:
+ resolver._get_dist_for(ireq)
+
+ return set(results)
+
+ def get_dependencies(self, ireq):
+ """
+ Given a pinned, URL, or editable InstallRequirement, returns a set of
+ dependencies (also InstallRequirements, but not necessarily pinned).
+ They indicate the secondary dependencies for the given requirement.
+ """
+ if not (
+ ireq.editable or is_url_requirement(ireq) or is_pinned_requirement(ireq)
+ ):
+ raise TypeError(
+ "Expected url, pinned or editable InstallRequirement, got {}".format(
+ ireq
+ )
+ )
+
+ if ireq not in self._dependencies_cache:
+ if ireq.editable and (ireq.source_dir and os.path.exists(ireq.source_dir)):
+ # No download_dir for locally available editable requirements.
+ # If a download_dir is passed, pip will unnecessarily archive
+ # the entire source directory
+ download_dir = None
+ elif ireq.link and ireq.link.is_vcs:
+ # No download_dir for VCS sources. This also works around pip
+ # using git-checkout-index, which gets rid of the .git dir.
+ download_dir = None
+ else:
+ download_dir = self._get_download_path(ireq)
+ makedirs(download_dir, exist_ok=True)
+ if PIP_VERSION[:2] <= (20, 2):
+ makedirs(self._wheel_download_dir, exist_ok=True)
+
+ with global_tempdir_manager():
+ wheel_cache = WheelCache(self._cache_dir, self.options.format_control)
+ self._dependencies_cache[ireq] = self.resolve_reqs(
+ download_dir, ireq, wheel_cache
+ )
+
+ return self._dependencies_cache[ireq]
+
+ def copy_ireq_dependencies(self, source, dest):
+ try:
+ self._dependencies_cache[dest] = self._dependencies_cache[source]
+ except KeyError:
+ # `source` may not be in cache yet.
+ pass
+
+ def _get_project(self, ireq):
+ """
+        Return a dict of project info from the PyPI JSON API for a given
+        InstallRequirement. Return None on an HTTP/JSON error or if the
+        package is not found on the PyPI server.
+
+ API reference: https://warehouse.readthedocs.io/api-reference/json/
+ """
+ package_indexes = (
+ PackageIndex(url=index_url, file_storage_domain="")
+ for index_url in self.finder.search_scope.index_urls
+ )
+ for package_index in package_indexes:
+ url = "{url}/{name}/json".format(url=package_index.pypi_url, name=ireq.name)
+ try:
+ response = self.session.get(url)
+ except RequestException as e:
+ log.debug(
+ "Fetch package info from PyPI failed: {url}: {e}".format(
+ url=url, e=e
+ )
+ )
+ continue
+
+            # Skip this index server: either the package is missing
+            # or the JSON API is not supported
+ if response.status_code == 404:
+ continue
+
+ try:
+ data = response.json()
+ except ValueError as e:
+ log.debug(
+ "Cannot parse JSON response from PyPI: {url}: {e}".format(
+ url=url, e=e
+ )
+ )
+ continue
+ return data
+ return None
+
+ def _get_download_path(self, ireq):
+ """
+ Determine the download dir location in a way which avoids name
+ collisions.
+ """
+ if ireq.link:
+ salt = hashlib.sha224(ireq.link.url_without_fragment.encode()).hexdigest()
+ # Nest directories to avoid running out of top level dirs on some FS
+ # (see pypi _get_cache_path_parts, which inspired this)
+ salt = [salt[:2], salt[2:4], salt[4:6], salt[6:]]
+ return os.path.join(self._download_dir, *salt)
+ else:
+ return self._download_dir
+
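A minimal sketch of the nesting scheme described above (the URL and cache directory are invented placeholders):

```python
import hashlib
import os

# sha224 of the link URL, split into 2-char buckets plus the remainder
salt = hashlib.sha224(b"https://example.org/pkg-1.0.zip").hexdigest()
parts = [salt[:2], salt[2:4], salt[4:6], salt[6:]]
print(os.path.join("/tmp/pip-tools-cache/pkgs", *parts))
# e.g. /tmp/pip-tools-cache/pkgs/ab/cd/ef/<remaining 50 hex chars>
```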
+ def get_hashes(self, ireq):
+ """
+ Given an InstallRequirement, return a set of hashes that represent all
+ of the files for a given requirement. Unhashable requirements return an
+ empty set. Unpinned requirements raise a TypeError.
+ """
+
+ if ireq.link:
+ link = ireq.link
+
+ if link.is_vcs or (link.is_file and link.is_existing_dir()):
+ # Return empty set for unhashable requirements.
+ # Unhashable logic modeled on pip's
+ # RequirementPreparer.prepare_linked_requirement
+ return set()
+
+ if is_url_requirement(ireq):
+ # Directly hash URL requirements.
+ # URL requirements may have been previously downloaded and cached
+ # locally by self.resolve_reqs()
+ cached_path = os.path.join(self._get_download_path(ireq), link.filename)
+ if os.path.exists(cached_path):
+ cached_link = Link(path_to_url(cached_path))
+ else:
+ cached_link = link
+ return {self._get_file_hash(cached_link)}
+
+ if not is_pinned_requirement(ireq):
+ raise TypeError("Expected pinned requirement, got {}".format(ireq))
+
+ log.debug(ireq.name)
+
+ with log.indentation():
+ hashes = self._get_hashes_from_pypi(ireq)
+ if hashes is None:
+ log.log("Couldn't get hashes from PyPI, fallback to hashing files")
+ return self._get_hashes_from_files(ireq)
+
+ return hashes
+
+ def _get_hashes_from_pypi(self, ireq):
+ """
+        Return a set of hashes from the PyPI JSON API for a given InstallRequirement.
+        Return None if fetching the data fails or the digests are missing.
+ """
+ project = self._get_project(ireq)
+ if project is None:
+ return None
+
+ _, version, _ = as_tuple(ireq)
+
+ try:
+ release_files = project["releases"][version]
+ except KeyError:
+ log.debug("Missing release files on PyPI")
+ return None
+
+ try:
+ hashes = {
+ "{algo}:{digest}".format(
+ algo=FAVORITE_HASH, digest=file_["digests"][FAVORITE_HASH]
+ )
+ for file_ in release_files
+ if file_["packagetype"] in self.HASHABLE_PACKAGE_TYPES
+ }
+ except KeyError:
+ log.debug("Missing digests of release files on PyPI")
+ return None
+
+ return hashes
+
+ def _get_hashes_from_files(self, ireq):
+ """
+ Return a set of hashes for all release files of a given InstallRequirement.
+ """
+        # We need to get all of the candidates that match our current version
+        # pin; these represent all of the files that could possibly satisfy
+        # this constraint.
+ all_candidates = self.find_all_candidates(ireq.name)
+ candidates_by_version = lookup_table(all_candidates, key=lambda c: c.version)
+ matching_versions = list(
+ ireq.specifier.filter((candidate.version for candidate in all_candidates))
+ )
+ matching_candidates = candidates_by_version[matching_versions[0]]
+
+ return {
+ self._get_file_hash(candidate.link) for candidate in matching_candidates
+ }
+
+ def _get_file_hash(self, link):
+ log.debug("Hashing {}".format(link.show_url))
+ h = hashlib.new(FAVORITE_HASH)
+ with open_local_or_remote_file(link, self.session) as f:
+ # Chunks to iterate
+ chunks = iter(lambda: f.stream.read(FILE_CHUNK_SIZE), b"")
+
+ # Choose a context manager depending on verbosity
+ if log.verbosity >= 1:
+ iter_length = f.size / FILE_CHUNK_SIZE if f.size else None
+ bar_template = "{prefix} |%(bar)s| %(info)s".format(
+ prefix=" " * log.current_indent
+ )
+ context_manager = progressbar(
+ chunks,
+ length=iter_length,
+ # Make it look like default pip progress bar
+                    fill_char="█",
+ empty_char=" ",
+ bar_template=bar_template,
+ width=32,
+ )
+ else:
+ context_manager = contextlib.nullcontext(chunks)
+
+ # Iterate over the chosen context manager
+ with context_manager as bar:
+ for chunk in bar:
+ h.update(chunk)
+ return ":".join([FAVORITE_HASH, h.hexdigest()])
+
+ @contextmanager
+ def allow_all_wheels(self):
+ """
+ Monkey patches pip.Wheel to allow wheels from all platforms and Python versions.
+
+    This also saves the candidate cache and sets a new one, so that results
+    from previous non-patched calls do not interfere.
+ """
+
+ def _wheel_supported(self, tags=None):
+ # Ignore current platform. Support everything.
+ return True
+
+ def _wheel_support_index_min(self, tags=None):
+ # All wheels are equal priority for sorting.
+ return 0
+
+ original_wheel_supported = Wheel.supported
+ original_support_index_min = Wheel.support_index_min
+ original_cache = self._available_candidates_cache
+
+ Wheel.supported = _wheel_supported
+ Wheel.support_index_min = _wheel_support_index_min
+ self._available_candidates_cache = {}
+
+ try:
+ yield
+ finally:
+ Wheel.supported = original_wheel_supported
+ Wheel.support_index_min = original_support_index_min
+ self._available_candidates_cache = original_cache
+
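A hedged usage sketch, mirroring how Resolver.resolve_hashes() drives this context manager (the pin and the empty pip args are placeholders):

```python
from pip._internal.req.constructors import install_req_from_line
from piptools.locations import CACHE_DIR
from piptools.repositories import PyPIRepository

repository = PyPIRepository([], cache_dir=CACHE_DIR)
with repository.allow_all_wheels():
    # Hashes now cover wheels for every platform and Python version
    hashes = repository.get_hashes(install_req_from_line("six==1.15.0"))
```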
+ def _setup_logging(self):
+ """
+        Set up pip's logger. Ensure pip's verbosity matches pip-tools' and
+        sync pip's log stream with LogContext.stream.
+ """
+        # pip's default logger is noisy, so decrease its verbosity
+ setup_logging(
+ verbosity=log.verbosity - 1,
+ no_color=self.options.no_color,
+ user_log_file=self.options.log,
+ )
+
+ # Sync pip's console handler stream with LogContext.stream
+ logger = logging.getLogger()
+ for handler in logger.handlers:
+ if handler.name == "console": # pragma: no branch
+ handler.stream = log.stream
+ break
+ else: # pragma: no cover
+            # There is always a console handler. This warning would be a signal
+            # that this block should be removed/revisited, because pip may have
+            # refactored out its logging config.
+ log.warning("Couldn't find a 'console' logging handler")
+
+ # Sync pip's progress bars stream with LogContext.stream
+ for bar_cls in itertools.chain(*BAR_TYPES.values()):
+ bar_cls.file = log.stream
+
+
+@contextmanager
+def open_local_or_remote_file(link, session):
+ """
+ Open local or remote file for reading.
+
+ :type link: pip.index.Link
+ :type session: requests.Session
+ :raises ValueError: If link points to a local directory.
+ :return: a context manager to a FileStream with the opened file-like object
+ """
+ url = link.url_without_fragment
+
+ if link.is_file:
+ # Local URL
+ local_path = url_to_path(url)
+ if os.path.isdir(local_path):
+ raise ValueError("Cannot open directory for read: {}".format(url))
+ else:
+ st = os.stat(local_path)
+ with open(local_path, "rb") as local_file:
+ yield FileStream(stream=local_file, size=st.st_size)
+ else:
+ # Remote URL
+ headers = {"Accept-Encoding": "identity"}
+ response = session.get(url, headers=headers, stream=True)
+
+ # Content length must be int or None
+ try:
+ content_length = int(response.headers["content-length"])
+ except (ValueError, KeyError, TypeError):
+ content_length = None
+
+ try:
+ yield FileStream(stream=response.raw, size=content_length)
+ finally:
+ response.close()
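A minimal usage sketch, assuming pip's internal Link model at this vendored version and the requests library (the URL is a placeholder):

```python
import requests
from pip._internal.models.link import Link
from piptools.repositories.pypi import open_local_or_remote_file

session = requests.Session()
link = Link("https://example.org/packages/example-1.0.tar.gz")
with open_local_or_remote_file(link, session) as f:
    head = f.stream.read(8192)  # first chunk of the remote file
    total = f.size              # Content-Length if the server sent one, else None
```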
diff --git a/third_party/python/pip_tools/piptools/resolver.py b/third_party/python/pip_tools/piptools/resolver.py
new file mode 100644
index 0000000000..d46a04a9e3
--- /dev/null
+++ b/third_party/python/pip_tools/piptools/resolver.py
@@ -0,0 +1,405 @@
+# coding: utf-8
+from __future__ import absolute_import, division, print_function, unicode_literals
+
+import copy
+from functools import partial
+from itertools import chain, count, groupby
+
+from pip._internal.req.constructors import install_req_from_line
+from pip._internal.req.req_tracker import update_env_context_manager
+
+from . import click
+from .logging import log
+from .utils import (
+ UNSAFE_PACKAGES,
+ format_requirement,
+ format_specifier,
+ is_pinned_requirement,
+ is_url_requirement,
+ key_from_ireq,
+)
+
+green = partial(click.style, fg="green")
+magenta = partial(click.style, fg="magenta")
+
+
+class RequirementSummary(object):
+ """
+ Summary of a requirement's properties for comparison purposes.
+ """
+
+ def __init__(self, ireq):
+ self.req = ireq.req
+ self.key = key_from_ireq(ireq)
+ self.extras = frozenset(ireq.extras)
+ self.specifier = ireq.specifier
+
+ def __eq__(self, other):
+ return (
+ self.key == other.key
+ and self.specifier == other.specifier
+ and self.extras == other.extras
+ )
+
+ def __hash__(self):
+ return hash((self.key, self.specifier, self.extras))
+
+ def __str__(self):
+ return repr((self.key, str(self.specifier), sorted(self.extras)))
+
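A sketch of how the resolver uses these summaries to detect changes between rounds (the requirement strings are invented):

```python
from pip._internal.req.constructors import install_req_from_line
from piptools.resolver import RequirementSummary

old = {RequirementSummary(install_req_from_line("jinja2>=2.4"))}
new = {
    RequirementSummary(install_req_from_line("jinja2>=2.4")),
    RequirementSummary(install_req_from_line("werkzeug>=0.9")),
}
added = new - old  # one summary (werkzeug>=0.9); hashability makes set math work
```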
+
+def combine_install_requirements(repository, ireqs):
+ """
+ Return a single install requirement that reflects a combination of
+ all the inputs.
+ """
+ # We will store the source ireqs in a _source_ireqs attribute;
+ # if any of the inputs have this, then use those sources directly.
+ source_ireqs = []
+ for ireq in ireqs:
+ source_ireqs.extend(getattr(ireq, "_source_ireqs", [ireq]))
+
+ # Optimization. Don't bother with combination logic.
+ if len(source_ireqs) == 1:
+ return source_ireqs[0]
+
+ # deepcopy the accumulator so as to not modify the inputs
+ combined_ireq = copy.deepcopy(source_ireqs[0])
+ repository.copy_ireq_dependencies(source_ireqs[0], combined_ireq)
+
+ for ireq in source_ireqs[1:]:
+ # NOTE we may be losing some info on dropped reqs here
+ combined_ireq.req.specifier &= ireq.req.specifier
+ if combined_ireq.constraint:
+ # We don't find dependencies for constraint ireqs, so copy them
+ # from non-constraints:
+ repository.copy_ireq_dependencies(ireq, combined_ireq)
+ combined_ireq.constraint &= ireq.constraint
+ # Return a sorted, de-duped tuple of extras
+ combined_ireq.extras = tuple(
+ sorted(set(tuple(combined_ireq.extras) + tuple(ireq.extras)))
+ )
+
+    # InstallRequirement objects are assumed to come from only one source, and
+ # so they support only a single comes_from entry. This function breaks this
+ # model. As a workaround, we deterministically choose a single source for
+ # the comes_from entry, and add an extra _source_ireqs attribute to keep
+ # track of multiple sources for use within pip-tools.
+ if len(source_ireqs) > 1:
+ if any(ireq.comes_from is None for ireq in source_ireqs):
+ # None indicates package was directly specified.
+ combined_ireq.comes_from = None
+ else:
+ # Populate the comes_from field from one of the sources.
+ # Requirement input order is not stable, so we need to sort:
+ # We choose the shortest entry in order to keep the printed
+ # representation as concise as possible.
+ combined_ireq.comes_from = min(
+ (ireq.comes_from for ireq in source_ireqs),
+ key=lambda x: (len(str(x)), str(x)),
+ )
+ combined_ireq._source_ireqs = source_ireqs
+ return combined_ireq
+
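A hedged sketch of the combination (inputs are invented; a stub stands in for the repository, which is only consulted for dependency copying here):

```python
from pip._internal.req.constructors import install_req_from_line
from piptools.resolver import combine_install_requirements

class _StubRepository:
    def copy_ireq_dependencies(self, source, dest):
        pass  # no dependency cache in this sketch

a = install_req_from_line("Django<1.9,>=1.4.2")
b = install_req_from_line("django[bcrypt]~=1.5")
combined = combine_install_requirements(_StubRepository(), [a, b])
str(combined.req.specifier)  # '<1.9,>=1.4.2,~=1.5' (specifiers intersected)
combined.extras              # ('bcrypt',) (extras unioned and sorted)
```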
+
+class Resolver(object):
+ def __init__(
+ self,
+ constraints,
+ repository,
+ cache,
+ prereleases=False,
+ clear_caches=False,
+ allow_unsafe=False,
+ ):
+ """
+ This class resolves a given set of constraints (a collection of
+ InstallRequirement objects) by consulting the given Repository and the
+ DependencyCache.
+ """
+ self.our_constraints = set(constraints)
+ self.their_constraints = set()
+ self.repository = repository
+ self.dependency_cache = cache
+ self.prereleases = prereleases
+ self.clear_caches = clear_caches
+ self.allow_unsafe = allow_unsafe
+ self.unsafe_constraints = set()
+
+ @property
+ def constraints(self):
+ return set(
+ self._group_constraints(chain(self.our_constraints, self.their_constraints))
+ )
+
+ def resolve_hashes(self, ireqs):
+ """
+ Finds acceptable hashes for all of the given InstallRequirements.
+ """
+ log.debug("")
+ log.debug("Generating hashes:")
+ with self.repository.allow_all_wheels(), log.indentation():
+ return {ireq: self.repository.get_hashes(ireq) for ireq in ireqs}
+
+ def resolve(self, max_rounds=10):
+ """
+ Finds concrete package versions for all the given InstallRequirements
+ and their recursive dependencies. The end result is a flat list of
+ (name, version) tuples. (Or an editable package.)
+
+ Resolves constraints one round at a time, until they don't change
+        anymore. Protects against infinite loops by breaking out after a
+        maximum number of rounds.
+ """
+ if self.clear_caches:
+ self.dependency_cache.clear()
+ self.repository.clear_caches()
+
+ # Ignore existing packages
+ # NOTE: str() wrapping necessary for Python 2/3 compat
+ with update_env_context_manager(PIP_EXISTS_ACTION=str("i")):
+ for current_round in count(start=1): # pragma: no branch
+ if current_round > max_rounds:
+ raise RuntimeError(
+ "No stable configuration of concrete packages "
+ "could be found for the given constraints after "
+ "{max_rounds} rounds of resolving.\n"
+ "This is likely a bug.".format(max_rounds=max_rounds)
+ )
+
+ log.debug("")
+ log.debug(magenta("{:^60}".format("ROUND {}".format(current_round))))
+ # If a package version (foo==2.0) was built in a previous round,
+ # and in this round a different version of foo needs to be built
+ # (i.e. foo==1.0), the directory will exist already, which will
+ # cause a pip build failure. The trick is to start with a new
+ # build cache dir for every round, so this can never happen.
+ with self.repository.freshen_build_caches():
+ has_changed, best_matches = self._resolve_one_round()
+ log.debug("-" * 60)
+ log.debug(
+ "Result of round {}: {}".format(
+ current_round,
+ "not stable" if has_changed else "stable, done",
+ )
+ )
+ if not has_changed:
+ break
+
+ # Only include hard requirements and not pip constraints
+ results = {req for req in best_matches if not req.constraint}
+
+ # Filter out unsafe requirements.
+ self.unsafe_constraints = set()
+ if not self.allow_unsafe:
+ # reverse_dependencies is used to filter out packages that are only
+ # required by unsafe packages. This logic is incomplete, as it would
+ # fail to filter sub-sub-dependencies of unsafe packages. None of the
+ # UNSAFE_PACKAGES currently have any dependencies at all (which makes
+ # sense for installation tools) so this seems sufficient.
+ reverse_dependencies = self.reverse_dependencies(results)
+ for req in results.copy():
+ required_by = reverse_dependencies.get(req.name.lower(), [])
+ if req.name in UNSAFE_PACKAGES or (
+ required_by and all(name in UNSAFE_PACKAGES for name in required_by)
+ ):
+ self.unsafe_constraints.add(req)
+ results.remove(req)
+
+ return results
+
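A minimal sketch of driving the resolver end to end, mirroring scripts/compile.py (the empty pip args and the constraint are placeholders):

```python
from pip._internal.req.constructors import install_req_from_line
from piptools.cache import DependencyCache
from piptools.locations import CACHE_DIR
from piptools.repositories import PyPIRepository
from piptools.resolver import Resolver

repository = PyPIRepository([], cache_dir=CACHE_DIR)
resolver = Resolver(
    constraints=[install_req_from_line("flask>=0.10")],
    repository=repository,
    cache=DependencyCache(CACHE_DIR),
)
results = resolver.resolve(max_rounds=10)
# e.g. pinned ireqs for flask, click, itsdangerous, jinja2, markupsafe, werkzeug
```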
+ def _group_constraints(self, constraints):
+ """
+ Groups constraints (remember, InstallRequirements!) by their key name,
+        combining their SpecifierSets into a single InstallRequirement per
+ package. For example, given the following constraints:
+
+ Django<1.9,>=1.4.2
+ django~=1.5
+ Flask~=0.7
+
+ This will be combined into a single entry per package:
+
+ django~=1.5,<1.9,>=1.4.2
+ flask~=0.7
+
+ """
+ constraints = list(constraints)
+ for ireq in constraints:
+ if ireq.name is None:
+                # get_dependencies has the side effect of assigning a name to
+                # the ireq (so we can group by the name below).
+ # (so we can group by the name below).
+ self.repository.get_dependencies(ireq)
+
+ # Sort first by name, i.e. the groupby key. Then within each group,
+ # sort editables first.
+ # This way, we don't bother with combining editables, since the first
+ # ireq will be editable, if one exists.
+ for _, ireqs in groupby(
+ sorted(constraints, key=(lambda x: (key_from_ireq(x), not x.editable))),
+ key=key_from_ireq,
+ ):
+ yield combine_install_requirements(self.repository, ireqs)
+
+ def _resolve_one_round(self):
+ """
+ Resolves one level of the current constraints, by finding the best
+ match for each package in the repository and adding all requirements
+ for those best package versions. Some of these constraints may be new
+ or updated.
+
+ Returns whether new constraints appeared in this round. If no
+ constraints were added or changed, this indicates a stable
+ configuration.
+ """
+ # Sort this list for readability of terminal output
+ constraints = sorted(self.constraints, key=key_from_ireq)
+
+ log.debug("Current constraints:")
+ with log.indentation():
+ for constraint in constraints:
+ log.debug(str(constraint))
+
+ log.debug("")
+ log.debug("Finding the best candidates:")
+ with log.indentation():
+ best_matches = {self.get_best_match(ireq) for ireq in constraints}
+
+ # Find the new set of secondary dependencies
+ log.debug("")
+ log.debug("Finding secondary dependencies:")
+
+ their_constraints = []
+ with log.indentation():
+ for best_match in best_matches:
+ their_constraints.extend(self._iter_dependencies(best_match))
+ # Grouping constraints to make clean diff between rounds
+ theirs = set(self._group_constraints(their_constraints))
+
+ # NOTE: We need to compare RequirementSummary objects, since
+ # InstallRequirement does not define equality
+ diff = {RequirementSummary(t) for t in theirs} - {
+ RequirementSummary(t) for t in self.their_constraints
+ }
+ removed = {RequirementSummary(t) for t in self.their_constraints} - {
+ RequirementSummary(t) for t in theirs
+ }
+
+ has_changed = len(diff) > 0 or len(removed) > 0
+ if has_changed:
+ log.debug("")
+ log.debug("New dependencies found in this round:")
+ with log.indentation():
+ for new_dependency in sorted(diff, key=key_from_ireq):
+ log.debug("adding {}".format(new_dependency))
+ log.debug("Removed dependencies in this round:")
+ with log.indentation():
+ for removed_dependency in sorted(removed, key=key_from_ireq):
+ log.debug("removing {}".format(removed_dependency))
+
+ # Store the last round's results in the their_constraints
+ self.their_constraints = theirs
+ return has_changed, best_matches
+
+ def get_best_match(self, ireq):
+ """
+ Returns a (pinned or editable) InstallRequirement, indicating the best
+        match to use for the given InstallRequirement.
+
+ Example:
+ Given the constraint Flask>=0.10, may return Flask==0.10.1 at
+ a certain moment in time.
+
+ Pinned requirements will always return themselves, i.e.
+
+ Flask==0.10.1 => Flask==0.10.1
+
+ """
+ if ireq.editable or is_url_requirement(ireq):
+ # NOTE: it's much quicker to immediately return instead of
+ # hitting the index server
+ best_match = ireq
+ elif is_pinned_requirement(ireq):
+ # NOTE: it's much quicker to immediately return instead of
+ # hitting the index server
+ best_match = ireq
+ elif ireq.constraint:
+ # NOTE: This is not a requirement (yet) and does not need
+ # to be resolved
+ best_match = ireq
+ else:
+ best_match = self.repository.find_best_match(
+ ireq, prereleases=self.prereleases
+ )
+
+ # Format the best match
+ log.debug(
+ "found candidate {} (constraint was {})".format(
+ format_requirement(best_match), format_specifier(ireq)
+ )
+ )
+ best_match.comes_from = ireq.comes_from
+ if hasattr(ireq, "_source_ireqs"):
+ best_match._source_ireqs = ireq._source_ireqs
+ return best_match
+
+ def _iter_dependencies(self, ireq):
+ """
+        Given a pinned, URL, or editable InstallRequirement, collects all of
+        its secondary dependencies, either by looking them up in a local
+ cache, or by reaching out to the repository.
+
+ Editable requirements will never be looked up, as they may have
+ changed at any time.
+ """
+ # Pip does not resolve dependencies of constraints. We skip handling
+ # constraints here as well to prevent the cache from being polluted.
+ # Constraints that are later determined to be dependencies will be
+ # marked as non-constraints in later rounds by
+ # `combine_install_requirements`, and will be properly resolved.
+ # See https://github.com/pypa/pip/
+ # blob/6896dfcd831330c13e076a74624d95fa55ff53f4/src/pip/_internal/
+ # legacy_resolve.py#L325
+ if ireq.constraint:
+ return
+
+ if ireq.editable or is_url_requirement(ireq):
+ for dependency in self.repository.get_dependencies(ireq):
+ yield dependency
+ return
+ elif not is_pinned_requirement(ireq):
+ raise TypeError(
+ "Expected pinned or editable requirement, got {}".format(ireq)
+ )
+
+ # Now, either get the dependencies from the dependency cache (for
+ # speed), or reach out to the external repository to
+ # download and inspect the package version and get dependencies
+ # from there
+ if ireq not in self.dependency_cache:
+ log.debug(
+ "{} not in cache, need to check index".format(format_requirement(ireq)),
+ fg="yellow",
+ )
+ dependencies = self.repository.get_dependencies(ireq)
+ self.dependency_cache[ireq] = sorted(str(ireq.req) for ireq in dependencies)
+
+ # Example: ['Werkzeug>=0.9', 'Jinja2>=2.4']
+ dependency_strings = self.dependency_cache[ireq]
+ log.debug(
+ "{:25} requires {}".format(
+ format_requirement(ireq),
+ ", ".join(sorted(dependency_strings, key=lambda s: s.lower())) or "-",
+ )
+ )
+ for dependency_string in dependency_strings:
+ yield install_req_from_line(
+ dependency_string, constraint=ireq.constraint, comes_from=ireq
+ )
+
+ def reverse_dependencies(self, ireqs):
+ non_editable = [
+ ireq for ireq in ireqs if not (ireq.editable or is_url_requirement(ireq))
+ ]
+ return self.dependency_cache.reverse_dependencies(non_editable)
diff --git a/third_party/python/pip_tools/piptools/scripts/__init__.py b/third_party/python/pip_tools/piptools/scripts/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/third_party/python/pip_tools/piptools/scripts/__init__.py
diff --git a/third_party/python/pip_tools/piptools/scripts/compile.py b/third_party/python/pip_tools/piptools/scripts/compile.py
new file mode 100644
index 0000000000..ca650e4913
--- /dev/null
+++ b/third_party/python/pip_tools/piptools/scripts/compile.py
@@ -0,0 +1,495 @@
+# coding: utf-8
+from __future__ import absolute_import, division, print_function, unicode_literals
+
+import os
+import shlex
+import sys
+import tempfile
+import warnings
+
+from click import Command
+from click.utils import safecall
+from pip._internal.commands import create_command
+from pip._internal.req.constructors import install_req_from_line
+from pip._internal.utils.misc import redact_auth_from_url
+
+from .. import click
+from .._compat import parse_requirements
+from ..cache import DependencyCache
+from ..exceptions import PipToolsError
+from ..locations import CACHE_DIR
+from ..logging import log
+from ..repositories import LocalRequirementsRepository, PyPIRepository
+from ..resolver import Resolver
+from ..utils import UNSAFE_PACKAGES, dedup, is_pinned_requirement, key_from_ireq
+from ..writer import OutputWriter
+
+DEFAULT_REQUIREMENTS_FILE = "requirements.in"
+DEFAULT_REQUIREMENTS_OUTPUT_FILE = "requirements.txt"
+
+
+def _get_default_option(option_name):
+ """
+    Get the default value of a pip option (including options from pip.conf)
+ by a given option name.
+ """
+ install_command = create_command("install")
+ default_values = install_command.parser.get_default_values()
+ return getattr(default_values, option_name)
+
+
+class BaseCommand(Command):
+ _os_args = None
+
+ def parse_args(self, ctx, args):
+ """
+ Override base `parse_args` to store the argument part of `sys.argv`.
+ """
+ self._os_args = set(args)
+ return super(BaseCommand, self).parse_args(ctx, args)
+
+ def has_arg(self, arg_name):
+ """
+ Detect whether a given arg name (including negative counterparts
+ to the arg, e.g. --no-arg) is present in the argument part of `sys.argv`.
+ """
+ command_options = {option.name: option for option in self.params}
+ option = command_options[arg_name]
+ args = set(option.opts + option.secondary_opts)
+ return bool(self._os_args & args)
+
+
+@click.command(
+ cls=BaseCommand, context_settings={"help_option_names": ("-h", "--help")}
+)
+@click.version_option()
+@click.pass_context
+@click.option("-v", "--verbose", count=True, help="Show more output")
+@click.option("-q", "--quiet", count=True, help="Give less output")
+@click.option(
+ "-n",
+ "--dry-run",
+ is_flag=True,
+ help="Only show what would happen, don't change anything",
+)
+@click.option(
+ "-p",
+ "--pre",
+ is_flag=True,
+ default=None,
+ help="Allow resolving to prereleases (default is not)",
+)
+@click.option(
+ "-r",
+ "--rebuild",
+ is_flag=True,
+ help="Clear any caches upfront, rebuild from scratch",
+)
+@click.option(
+ "-f",
+ "--find-links",
+ multiple=True,
+ help="Look for archives in this directory or on this HTML page",
+)
+@click.option(
+ "-i",
+ "--index-url",
+ help="Change index URL (defaults to {index_url})".format(
+ index_url=redact_auth_from_url(_get_default_option("index_url"))
+ ),
+)
+@click.option(
+ "--extra-index-url", multiple=True, help="Add additional index URL to search"
+)
+@click.option("--cert", help="Path to alternate CA bundle.")
+@click.option(
+ "--client-cert",
+ help="Path to SSL client certificate, a single file containing "
+ "the private key and the certificate in PEM format.",
+)
+@click.option(
+ "--trusted-host",
+ multiple=True,
+ help="Mark this host as trusted, even though it does not have "
+ "valid or any HTTPS.",
+)
+@click.option(
+ "--header/--no-header",
+ is_flag=True,
+ default=True,
+ help="Add header to generated file",
+)
+@click.option(
+ "--index/--no-index",
+ is_flag=True,
+ default=True,
+ help="DEPRECATED: Add index URL to generated file",
+)
+@click.option(
+ "--emit-trusted-host/--no-emit-trusted-host",
+ is_flag=True,
+ default=True,
+ help="Add trusted host option to generated file",
+)
+@click.option(
+ "--annotate/--no-annotate",
+ is_flag=True,
+ default=True,
+ help="Annotate results, indicating where dependencies come from",
+)
+@click.option(
+ "-U",
+ "--upgrade",
+ is_flag=True,
+ default=False,
+ help="Try to upgrade all dependencies to their latest versions",
+)
+@click.option(
+ "-P",
+ "--upgrade-package",
+ "upgrade_packages",
+ nargs=1,
+ multiple=True,
+ help="Specify particular packages to upgrade.",
+)
+@click.option(
+ "-o",
+ "--output-file",
+ nargs=1,
+ default=None,
+ type=click.File("w+b", atomic=True, lazy=True),
+ help=(
+ "Output file name. Required if more than one input file is given. "
+ "Will be derived from input file otherwise."
+ ),
+)
+@click.option(
+ "--allow-unsafe/--no-allow-unsafe",
+ is_flag=True,
+ default=False,
+ help=(
+ "Pin packages considered unsafe: {}.\n\n"
+ "WARNING: Future versions of pip-tools will enable this behavior by default. "
+ "Use --no-allow-unsafe to keep the old behavior. It is recommended to pass the "
+ "--allow-unsafe now to adapt to the upcoming change.".format(
+ ", ".join(sorted(UNSAFE_PACKAGES))
+ )
+ ),
+)
+@click.option(
+ "--generate-hashes",
+ is_flag=True,
+ default=False,
+ help="Generate pip 8 style hashes in the resulting requirements file.",
+)
+@click.option(
+ "--reuse-hashes/--no-reuse-hashes",
+ is_flag=True,
+ default=True,
+ help=(
+ "Improve the speed of --generate-hashes by reusing the hashes from an "
+ "existing output file."
+ ),
+)
+@click.option(
+ "--max-rounds",
+ default=10,
+ help="Maximum number of rounds before resolving the requirements aborts.",
+)
+@click.argument("src_files", nargs=-1, type=click.Path(exists=True, allow_dash=True))
+@click.option(
+ "--build-isolation/--no-build-isolation",
+ is_flag=True,
+ default=True,
+ help="Enable isolation when building a modern source distribution. "
+ "Build dependencies specified by PEP 518 must be already installed "
+ "if build isolation is disabled.",
+)
+@click.option(
+ "--emit-find-links/--no-emit-find-links",
+ is_flag=True,
+ default=True,
+ help="Add the find-links option to generated file",
+)
+@click.option(
+ "--cache-dir",
+ help="Store the cache data in DIRECTORY.",
+ default=CACHE_DIR,
+ show_default=True,
+ type=click.Path(file_okay=False, writable=True),
+)
+@click.option("--pip-args", help="Arguments to pass directly to the pip command.")
+@click.option(
+ "--emit-index-url/--no-emit-index-url",
+ is_flag=True,
+ default=True,
+ help="Add index URL to generated file",
+)
+def cli(
+ ctx,
+ verbose,
+ quiet,
+ dry_run,
+ pre,
+ rebuild,
+ find_links,
+ index_url,
+ extra_index_url,
+ cert,
+ client_cert,
+ trusted_host,
+ header,
+ index,
+ emit_trusted_host,
+ annotate,
+ upgrade,
+ upgrade_packages,
+ output_file,
+ allow_unsafe,
+ generate_hashes,
+ reuse_hashes,
+ src_files,
+ max_rounds,
+ build_isolation,
+ emit_find_links,
+ cache_dir,
+ pip_args,
+ emit_index_url,
+):
+ """Compiles requirements.txt from requirements.in specs."""
+ log.verbosity = verbose - quiet
+
+ if len(src_files) == 0:
+ if os.path.exists(DEFAULT_REQUIREMENTS_FILE):
+ src_files = (DEFAULT_REQUIREMENTS_FILE,)
+ elif os.path.exists("setup.py"):
+ src_files = ("setup.py",)
+ else:
+ raise click.BadParameter(
+ (
+ "If you do not specify an input file, "
+ "the default is {} or setup.py"
+ ).format(DEFAULT_REQUIREMENTS_FILE)
+ )
+
+ if not output_file:
+ # An output file must be provided for stdin
+ if src_files == ("-",):
+ raise click.BadParameter("--output-file is required if input is from stdin")
+    # Use the default requirements output file if setup.py is the source file
+ elif src_files == ("setup.py",):
+ file_name = DEFAULT_REQUIREMENTS_OUTPUT_FILE
+ # An output file must be provided if there are multiple source files
+ elif len(src_files) > 1:
+ raise click.BadParameter(
+ "--output-file is required if two or more input files are given."
+ )
+ # Otherwise derive the output file from the source file
+ else:
+ base_name = src_files[0].rsplit(".", 1)[0]
+ file_name = base_name + ".txt"
+
+ output_file = click.open_file(file_name, "w+b", atomic=True, lazy=True)
+
+ # Close the file at the end of the context execution
+ ctx.call_on_close(safecall(output_file.close_intelligently))
+
+ if cli.has_arg("index") and cli.has_arg("emit_index_url"):
+ raise click.BadParameter(
+ "--index/--no-index and --emit-index-url/--no-emit-index-url "
+ "are mutually exclusive."
+ )
+ elif cli.has_arg("index"):
+ warnings.warn(
+ "--index and --no-index are deprecated and will be removed "
+ "in future versions. Use --emit-index-url/--no-emit-index-url instead.",
+ category=FutureWarning,
+ )
+ emit_index_url = index
+
+ ###
+ # Setup
+ ###
+
+ right_args = shlex.split(pip_args or "")
+ pip_args = []
+ for link in find_links:
+ pip_args.extend(["-f", link])
+ if index_url:
+ pip_args.extend(["-i", index_url])
+ for extra_index in extra_index_url:
+ pip_args.extend(["--extra-index-url", extra_index])
+ if cert:
+ pip_args.extend(["--cert", cert])
+ if client_cert:
+ pip_args.extend(["--client-cert", client_cert])
+ if pre:
+ pip_args.extend(["--pre"])
+ for host in trusted_host:
+ pip_args.extend(["--trusted-host", host])
+
+ if not build_isolation:
+ pip_args.append("--no-build-isolation")
+ pip_args.extend(right_args)
+
+ repository = PyPIRepository(pip_args, cache_dir=cache_dir)
+
+ # Parse all constraints coming from --upgrade-package/-P
+ upgrade_reqs_gen = (install_req_from_line(pkg) for pkg in upgrade_packages)
+ upgrade_install_reqs = {
+ key_from_ireq(install_req): install_req for install_req in upgrade_reqs_gen
+ }
+
+ existing_pins_to_upgrade = set()
+
+ # Proxy with a LocalRequirementsRepository if --upgrade is not specified
+ # (= default invocation)
+ if not upgrade and os.path.exists(output_file.name):
+        # Use a temporary repository to ensure outdated (removed) options from
+        # an existing requirements.txt don't leak into the current repository.
+ tmp_repository = PyPIRepository(pip_args, cache_dir=cache_dir)
+ ireqs = parse_requirements(
+ output_file.name,
+ finder=tmp_repository.finder,
+ session=tmp_repository.session,
+ options=tmp_repository.options,
+ )
+
+ # Exclude packages from --upgrade-package/-P from the existing
+ # constraints, and separately gather pins to be upgraded
+ existing_pins = {}
+ for ireq in filter(is_pinned_requirement, ireqs):
+ key = key_from_ireq(ireq)
+ if key in upgrade_install_reqs:
+ existing_pins_to_upgrade.add(key)
+ else:
+ existing_pins[key] = ireq
+ repository = LocalRequirementsRepository(
+ existing_pins, repository, reuse_hashes=reuse_hashes
+ )
+
+ ###
+ # Parsing/collecting initial requirements
+ ###
+
+ constraints = []
+ for src_file in src_files:
+ is_setup_file = os.path.basename(src_file) == "setup.py"
+ if is_setup_file or src_file == "-":
+ # pip requires filenames and not files. Since we want to support
+ # piping from stdin, we need to briefly save the input from stdin
+            # to a temporary file and have pip read that. This is also used
+            # for reading requirements from install_requires in setup.py.
+ tmpfile = tempfile.NamedTemporaryFile(mode="wt", delete=False)
+ if is_setup_file:
+ from distutils.core import run_setup
+
+ dist = run_setup(src_file)
+ tmpfile.write("\n".join(dist.install_requires))
+ comes_from = "{name} ({filename})".format(
+ name=dist.get_name(), filename=src_file
+ )
+ else:
+ tmpfile.write(sys.stdin.read())
+ comes_from = "-r -"
+ tmpfile.flush()
+ reqs = list(
+ parse_requirements(
+ tmpfile.name,
+ finder=repository.finder,
+ session=repository.session,
+ options=repository.options,
+ )
+ )
+ for req in reqs:
+ req.comes_from = comes_from
+ constraints.extend(reqs)
+ else:
+ constraints.extend(
+ parse_requirements(
+ src_file,
+ finder=repository.finder,
+ session=repository.session,
+ options=repository.options,
+ )
+ )
+
+ primary_packages = {
+ key_from_ireq(ireq) for ireq in constraints if not ireq.constraint
+ }
+
+ allowed_upgrades = primary_packages | existing_pins_to_upgrade
+ constraints.extend(
+ ireq for key, ireq in upgrade_install_reqs.items() if key in allowed_upgrades
+ )
+
+    # Filter out requirements whose environment markers do not match (PEP 496)
+ constraints = [
+ req for req in constraints if req.markers is None or req.markers.evaluate()
+ ]
+
+ log.debug("Using indexes:")
+ with log.indentation():
+ for index_url in dedup(repository.finder.index_urls):
+ log.debug(redact_auth_from_url(index_url))
+
+ if repository.finder.find_links:
+ log.debug("")
+ log.debug("Using links:")
+ with log.indentation():
+ for find_link in dedup(repository.finder.find_links):
+ log.debug(redact_auth_from_url(find_link))
+
+ try:
+ resolver = Resolver(
+ constraints,
+ repository,
+ prereleases=repository.finder.allow_all_prereleases or pre,
+ cache=DependencyCache(cache_dir),
+ clear_caches=rebuild,
+ allow_unsafe=allow_unsafe,
+ )
+ results = resolver.resolve(max_rounds=max_rounds)
+ if generate_hashes:
+ hashes = resolver.resolve_hashes(results)
+ else:
+ hashes = None
+ except PipToolsError as e:
+ log.error(str(e))
+ sys.exit(2)
+
+ log.debug("")
+
+ ##
+ # Output
+ ##
+
+ writer = OutputWriter(
+ src_files,
+ output_file,
+ click_ctx=ctx,
+ dry_run=dry_run,
+ emit_header=header,
+ emit_index_url=emit_index_url,
+ emit_trusted_host=emit_trusted_host,
+ annotate=annotate,
+ generate_hashes=generate_hashes,
+ default_index_url=repository.DEFAULT_INDEX_URL,
+ index_urls=repository.finder.index_urls,
+ trusted_hosts=repository.finder.trusted_hosts,
+ format_control=repository.finder.format_control,
+ allow_unsafe=allow_unsafe,
+ find_links=repository.finder.find_links,
+ emit_find_links=emit_find_links,
+ )
+ writer.write(
+ results=results,
+ unsafe_requirements=resolver.unsafe_constraints,
+ markers={
+ key_from_ireq(ireq): ireq.markers for ireq in constraints if ireq.markers
+ },
+ hashes=hashes,
+ )
+
+ if dry_run:
+ log.info("Dry-run, so nothing updated.")
diff --git a/third_party/python/pip_tools/piptools/scripts/sync.py b/third_party/python/pip_tools/piptools/scripts/sync.py
new file mode 100644
index 0000000000..9759b302f0
--- /dev/null
+++ b/third_party/python/pip_tools/piptools/scripts/sync.py
@@ -0,0 +1,214 @@
+# coding: utf-8
+from __future__ import absolute_import, division, print_function, unicode_literals
+
+import itertools
+import os
+import shlex
+import sys
+
+from pip._internal.commands import create_command
+from pip._internal.utils.misc import get_installed_distributions
+
+from .. import click, sync
+from .._compat import parse_requirements
+from ..exceptions import PipToolsError
+from ..logging import log
+from ..repositories import PyPIRepository
+from ..utils import flat_map
+
+DEFAULT_REQUIREMENTS_FILE = "requirements.txt"
+
+
+@click.command(context_settings={"help_option_names": ("-h", "--help")})
+@click.version_option()
+@click.option(
+ "-a",
+ "--ask",
+ is_flag=True,
+ help="Show what would happen, then ask whether to continue",
+)
+@click.option(
+ "-n",
+ "--dry-run",
+ is_flag=True,
+ help="Only show what would happen, don't change anything",
+)
+@click.option("--force", is_flag=True, help="Proceed even if conflicts are found")
+@click.option(
+ "-f",
+ "--find-links",
+ multiple=True,
+ help="Look for archives in this directory or on this HTML page",
+)
+@click.option("-i", "--index-url", help="Change index URL (defaults to PyPI)")
+@click.option(
+ "--extra-index-url", multiple=True, help="Add additional index URL to search"
+)
+@click.option(
+ "--trusted-host",
+ multiple=True,
+ help="Mark this host as trusted, even though it does not have valid or any HTTPS.",
+)
+@click.option(
+ "--no-index",
+ is_flag=True,
+ help="Ignore package index (only looking at --find-links URLs instead)",
+)
+@click.option("-v", "--verbose", count=True, help="Show more output")
+@click.option("-q", "--quiet", count=True, help="Give less output")
+@click.option(
+ "--user", "user_only", is_flag=True, help="Restrict attention to user directory"
+)
+@click.option("--cert", help="Path to alternate CA bundle.")
+@click.option(
+ "--client-cert",
+ help="Path to SSL client certificate, a single file containing "
+ "the private key and the certificate in PEM format.",
+)
+@click.argument("src_files", required=False, type=click.Path(exists=True), nargs=-1)
+@click.option("--pip-args", help="Arguments to pass directly to pip install.")
+def cli(
+ ask,
+ dry_run,
+ force,
+ find_links,
+ index_url,
+ extra_index_url,
+ trusted_host,
+ no_index,
+ verbose,
+ quiet,
+ user_only,
+ cert,
+ client_cert,
+ src_files,
+ pip_args,
+):
+ """Synchronize virtual environment with requirements.txt."""
+ log.verbosity = verbose - quiet
+
+ if not src_files:
+ if os.path.exists(DEFAULT_REQUIREMENTS_FILE):
+ src_files = (DEFAULT_REQUIREMENTS_FILE,)
+ else:
+ msg = "No requirement files given and no {} found in the current directory"
+ log.error(msg.format(DEFAULT_REQUIREMENTS_FILE))
+ sys.exit(2)
+
+ if any(src_file.endswith(".in") for src_file in src_files):
+ msg = (
+ "Some input files have the .in extension, which is most likely an error "
+ "and can cause weird behaviour. You probably meant to use "
+ "the corresponding *.txt file?"
+ )
+ if force:
+ log.warning("WARNING: " + msg)
+ else:
+ log.error("ERROR: " + msg)
+ sys.exit(2)
+
+ install_command = create_command("install")
+ options, _ = install_command.parse_args([])
+ session = install_command._build_session(options)
+ finder = install_command._build_package_finder(options=options, session=session)
+
+    # Parse the requirements files. Note: all options inside the requirements
+    # files will be collected by the finder.
+ requirements = flat_map(
+ lambda src: parse_requirements(src, finder=finder, session=session), src_files
+ )
+
+ try:
+ requirements = sync.merge(requirements, ignore_conflicts=force)
+ except PipToolsError as e:
+ log.error(str(e))
+ sys.exit(2)
+
+ installed_dists = get_installed_distributions(skip=[], user_only=user_only)
+ to_install, to_uninstall = sync.diff(requirements, installed_dists)
+
+ install_flags = (
+ _compose_install_flags(
+ finder,
+ no_index=no_index,
+ index_url=index_url,
+ extra_index_url=extra_index_url,
+ trusted_host=trusted_host,
+ find_links=find_links,
+ user_only=user_only,
+ cert=cert,
+ client_cert=client_cert,
+ )
+ + shlex.split(pip_args or "")
+ )
+ sys.exit(
+ sync.sync(
+ to_install,
+ to_uninstall,
+ dry_run=dry_run,
+ install_flags=install_flags,
+ ask=ask,
+ )
+ )
+
+
+def _compose_install_flags(
+ finder,
+ no_index=False,
+ index_url=None,
+ extra_index_url=None,
+ trusted_host=None,
+ find_links=None,
+ user_only=False,
+ cert=None,
+ client_cert=None,
+):
+ """
+ Compose install flags with the given finder and CLI options.
+ """
+ result = []
+
+ # Build --index-url/--extra-index-url/--no-index
+ if no_index:
+ result.append("--no-index")
+ elif index_url:
+ result.extend(["--index-url", index_url])
+ elif finder.index_urls:
+ finder_index_url = finder.index_urls[0]
+ if finder_index_url != PyPIRepository.DEFAULT_INDEX_URL:
+ result.extend(["--index-url", finder_index_url])
+ for extra_index in finder.index_urls[1:]:
+ result.extend(["--extra-index-url", extra_index])
+ else:
+ result.append("--no-index")
+
+ for extra_index in extra_index_url:
+ result.extend(["--extra-index-url", extra_index])
+
+ # Build --trusted-hosts
+ for host in itertools.chain(trusted_host, finder.trusted_hosts):
+ result.extend(["--trusted-host", host])
+
+ # Build --find-links
+ for link in itertools.chain(find_links, finder.find_links):
+ result.extend(["--find-links", link])
+
+ # Build format controls --no-binary/--only-binary
+ for format_control in ("no_binary", "only_binary"):
+ formats = getattr(finder.format_control, format_control)
+ if not formats:
+ continue
+ result.extend(
+ ["--" + format_control.replace("_", "-"), ",".join(sorted(formats))]
+ )
+
+ if user_only:
+ result.append("--user")
+
+ if cert:
+ result.extend(["--cert", cert])
+
+ if client_cert:
+ result.extend(["--client-cert", client_cert])
+
+ return result
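A sketch of the flag composition with a stub in place of pip's PackageFinder (all values are invented; _compose_install_flags is internal to this module):

```python
from piptools.scripts.sync import _compose_install_flags

class _StubFormatControl:
    no_binary = set()
    only_binary = set()

class _StubFinder:
    index_urls = []
    trusted_hosts = []
    find_links = []
    format_control = _StubFormatControl()

flags = _compose_install_flags(
    _StubFinder(),
    index_url="https://example.org/simple",
    extra_index_url=("https://mirror.example/simple",),
    trusted_host=(),
    find_links=(),
    user_only=True,
)
# ['--index-url', 'https://example.org/simple',
#  '--extra-index-url', 'https://mirror.example/simple', '--user']
```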
diff --git a/third_party/python/pip_tools/piptools/sync.py b/third_party/python/pip_tools/piptools/sync.py
new file mode 100644
index 0000000000..9967682c7d
--- /dev/null
+++ b/third_party/python/pip_tools/piptools/sync.py
@@ -0,0 +1,216 @@
+import collections
+import os
+import sys
+import tempfile
+from subprocess import check_call # nosec
+
+from pip._internal.commands.freeze import DEV_PKGS
+from pip._internal.utils.compat import stdlib_pkgs
+
+from . import click
+from .exceptions import IncompatibleRequirements
+from .logging import log
+from .utils import (
+ flat_map,
+ format_requirement,
+ get_hashes_from_ireq,
+ is_url_requirement,
+ key_from_ireq,
+ key_from_req,
+)
+
+PACKAGES_TO_IGNORE = (
+ ["-markerlib", "pip", "pip-tools", "pip-review", "pkg-resources"]
+ + list(stdlib_pkgs)
+ + list(DEV_PKGS)
+)
+
+
+def dependency_tree(installed_keys, root_key):
+ """
+ Calculate the dependency tree for the package `root_key` and return
+    a collection of all its dependencies. Uses a breadth-first (FIFO) traversal.
+
+ `installed_keys` should be a {key: requirement} mapping, e.g.
+ {'django': from_line('django==1.8')}
+ `root_key` should be the key to return the dependency tree for.
+ """
+ dependencies = set()
+ queue = collections.deque()
+
+ if root_key in installed_keys:
+ dep = installed_keys[root_key]
+ queue.append(dep)
+
+ while queue:
+ v = queue.popleft()
+ key = key_from_req(v)
+ if key in dependencies:
+ continue
+
+ dependencies.add(key)
+
+ for dep_specifier in v.requires():
+ dep_name = key_from_req(dep_specifier)
+ if dep_name in installed_keys:
+ dep = installed_keys[dep_name]
+
+ if dep_specifier.specifier.contains(dep.version):
+ queue.append(dep)
+
+ return dependencies
+
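A hedged sketch against the live environment (output depends on what is installed):

```python
from pip._internal.utils.misc import get_installed_distributions
from piptools.sync import dependency_tree
from piptools.utils import key_from_req

installed = {key_from_req(dist): dist for dist in get_installed_distributions(skip=[])}
dependency_tree(installed, "pip-tools")
# e.g. {'pip-tools', 'click', 'six', 'pip'} -- the transitive closure of pip-tools
```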
+
+def get_dists_to_ignore(installed):
+ """
+ Returns a collection of package names to ignore when performing pip-sync,
+ based on the currently installed environment. For example, when pip-tools
+ is installed in the local environment, it should be ignored, including all
+ of its dependencies (e.g. click). When pip-tools is not installed
+ locally, click should also be installed/uninstalled depending on the given
+ requirements.
+ """
+ installed_keys = {key_from_req(r): r for r in installed}
+ return list(
+ flat_map(lambda req: dependency_tree(installed_keys, req), PACKAGES_TO_IGNORE)
+ )
+
+
+def merge(requirements, ignore_conflicts):
+ by_key = {}
+
+ for ireq in requirements:
+ # Limitation: URL requirements are merged by precise string match, so
+ # "file:///example.zip#egg=example", "file:///example.zip", and
+ # "example==1.0" will not merge with each other
+ if ireq.match_markers():
+ key = key_from_ireq(ireq)
+
+ if not ignore_conflicts:
+ existing_ireq = by_key.get(key)
+ if existing_ireq:
+ # NOTE: We check equality here since we can assume that the
+ # requirements are all pinned
+ if ireq.specifier != existing_ireq.specifier:
+ raise IncompatibleRequirements(ireq, existing_ireq)
+
+ # TODO: Always pick the largest specifier in case of a conflict
+ by_key[key] = ireq
+ return by_key.values()
+
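A sketch of the conflict handling (the requirement strings are invented):

```python
from pip._internal.req.constructors import install_req_from_line
from piptools.sync import merge

reqs = [
    install_req_from_line("six==1.15.0"),
    install_req_from_line("six==1.14.0"),
]
merge(reqs, ignore_conflicts=True)   # keeps the last one seen: six==1.14.0
merge(reqs, ignore_conflicts=False)  # raises IncompatibleRequirements
```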
+
+def diff_key_from_ireq(ireq):
+ """
+ Calculate a key for comparing a compiled requirement with installed modules.
+    For URL requirements, only provide a useful key if the URL includes
+    #egg=name==version, which will set ireq.req.name and ireq.specifier.
+    Otherwise return str(ireq.link) so the key will not match and the package
+    will be reinstalled. Reinstalling is necessary to pick up changes when the
+    URL changes but the version does not.
+ """
+ if is_url_requirement(ireq):
+ if (
+ ireq.req
+ and (getattr(ireq.req, "key", None) or getattr(ireq.req, "name", None))
+ and ireq.specifier
+ ):
+ return key_from_ireq(ireq)
+ return str(ireq.link)
+ return key_from_ireq(ireq)
+
+
+def diff(compiled_requirements, installed_dists):
+ """
+ Calculate which packages should be installed or uninstalled, given a set
+ of compiled requirements and a list of currently installed modules.
+ """
+ requirements_lut = {diff_key_from_ireq(r): r for r in compiled_requirements}
+
+ satisfied = set() # holds keys
+ to_install = set() # holds InstallRequirement objects
+ to_uninstall = set() # holds keys
+
+ pkgs_to_ignore = get_dists_to_ignore(installed_dists)
+ for dist in installed_dists:
+ key = key_from_req(dist)
+ if key not in requirements_lut or not requirements_lut[key].match_markers():
+ to_uninstall.add(key)
+ elif requirements_lut[key].specifier.contains(dist.version):
+ satisfied.add(key)
+
+ for key, requirement in requirements_lut.items():
+ if key not in satisfied and requirement.match_markers():
+ to_install.add(requirement)
+
+ # Make sure to not uninstall any packages that should be ignored
+ to_uninstall -= set(pkgs_to_ignore)
+
+ return (to_install, to_uninstall)
+
+
+def sync(to_install, to_uninstall, dry_run=False, install_flags=None, ask=False):
+ """
+    Install and uninstall the given sets of modules.
+ """
+ exit_code = 0
+
+ if not to_uninstall and not to_install:
+ log.info("Everything up-to-date", err=False)
+ return exit_code
+
+ pip_flags = []
+ if log.verbosity < 0:
+ pip_flags += ["-q"]
+
+ if ask:
+ dry_run = True
+
+ if dry_run:
+ if to_uninstall:
+ click.echo("Would uninstall:")
+ for pkg in sorted(to_uninstall):
+ click.echo(" {}".format(pkg))
+
+ if to_install:
+ click.echo("Would install:")
+ for ireq in sorted(to_install, key=key_from_ireq):
+ click.echo(" {}".format(format_requirement(ireq)))
+
+ exit_code = 1
+
+ if ask and click.confirm("Would you like to proceed with these changes?"):
+ dry_run = False
+ exit_code = 0
+
+ if not dry_run:
+ if to_uninstall:
+ check_call( # nosec
+ [sys.executable, "-m", "pip", "uninstall", "-y"]
+ + pip_flags
+ + sorted(to_uninstall)
+ )
+
+ if to_install:
+ if install_flags is None:
+ install_flags = []
+ # prepare requirement lines
+ req_lines = []
+ for ireq in sorted(to_install, key=key_from_ireq):
+ ireq_hashes = get_hashes_from_ireq(ireq)
+ req_lines.append(format_requirement(ireq, hashes=ireq_hashes))
+
+ # save requirement lines to a temporary file
+ tmp_req_file = tempfile.NamedTemporaryFile(mode="wt", delete=False)
+ tmp_req_file.write("\n".join(req_lines))
+ tmp_req_file.close()
+
+ try:
+ check_call( # nosec
+ [sys.executable, "-m", "pip", "install", "-r", tmp_req_file.name]
+ + pip_flags
+ + install_flags
+ )
+ finally:
+ os.unlink(tmp_req_file.name)
+
+ return exit_code
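A dry-run sketch (the sets are invented; with dry_run=True nothing is executed):

```python
from pip._internal.req.constructors import install_req_from_line
from piptools.sync import sync

code = sync(
    to_install={install_req_from_line("six==1.15.0")},
    to_uninstall={"mock"},
    dry_run=True,
)
# prints "Would uninstall: mock" and "Would install: six==1.15.0"; returns 1
```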
diff --git a/third_party/python/pip_tools/piptools/utils.py b/third_party/python/pip_tools/piptools/utils.py
new file mode 100644
index 0000000000..4b20ba6e38
--- /dev/null
+++ b/third_party/python/pip_tools/piptools/utils.py
@@ -0,0 +1,384 @@
+# coding: utf-8
+from __future__ import absolute_import, division, print_function, unicode_literals
+
+import sys
+from collections import OrderedDict
+from itertools import chain
+
+from click.utils import LazyFile
+from pip._internal.req.constructors import install_req_from_line
+from pip._internal.utils.misc import redact_auth_from_url
+from pip._internal.vcs import is_url
+from pip._vendor import six
+from pip._vendor.six.moves import shlex_quote
+
+from .click import style
+
+UNSAFE_PACKAGES = {"setuptools", "distribute", "pip"}
+COMPILE_EXCLUDE_OPTIONS = {
+ "--dry-run",
+ "--quiet",
+ "--rebuild",
+ "--upgrade",
+ "--upgrade-package",
+ "--verbose",
+ "--cache-dir",
+ "--no-reuse-hashes",
+}
+
+
+def key_from_ireq(ireq):
+ """Get a standardized key for an InstallRequirement."""
+ if ireq.req is None and ireq.link is not None:
+ return str(ireq.link)
+ else:
+ return key_from_req(ireq.req)
+
+
+def key_from_req(req):
+ """Get an all-lowercase version of the requirement's name."""
+ if hasattr(req, "key"):
+ # from pkg_resources, such as installed dists for pip-sync
+ key = req.key
+ else:
+ # from packaging, such as install requirements from requirements.txt
+ key = req.name
+
+ key = key.replace("_", "-").lower()
+ return key
+
+
+def comment(text):
+ return style(text, fg="green")
+
+
+def make_install_requirement(name, version, extras, constraint=False):
+ # If no extras are specified, the extras string is blank
+ extras_string = ""
+ if extras:
+ # Sort extras for stability
+ extras_string = "[{}]".format(",".join(sorted(extras)))
+
+ return install_req_from_line(
+ str("{}{}=={}".format(name, extras_string, version)), constraint=constraint
+ )
+
+
+def is_url_requirement(ireq):
+ """
+    Return True if the requirement was specified as a path or URL.
+    ireq.original_link will have been set by InstallRequirement.__init__.
+ """
+ return bool(ireq.original_link)
+
+
+def format_requirement(ireq, marker=None, hashes=None):
+ """
+ Generic formatter for pretty printing InstallRequirements to the terminal
+    in a less verbose way than using their `__str__` method.
+ """
+ if ireq.editable:
+ line = "-e {}".format(ireq.link.url)
+ elif is_url_requirement(ireq):
+ line = ireq.link.url
+ else:
+ line = str(ireq.req).lower()
+
+ if marker:
+ line = "{} ; {}".format(line, marker)
+
+ if hashes:
+ for hash_ in sorted(hashes):
+ line += " \\\n --hash={}".format(hash_)
+
+ return line
+
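A quick sketch (the requirement and marker are invented):

```python
from pip._internal.req.constructors import install_req_from_line
from piptools.utils import format_requirement

ireq = install_req_from_line("Flask==0.10.1")
format_requirement(ireq)
# 'flask==0.10.1'
format_requirement(ireq, marker='python_version < "3.8"')
# 'flask==0.10.1 ; python_version < "3.8"'
```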
+
+def format_specifier(ireq):
+ """
+ Generic formatter for pretty printing the specifier part of
+ InstallRequirements to the terminal.
+ """
+ # TODO: Ideally, this is carried over to the pip library itself
+ specs = ireq.specifier if ireq.req is not None else []
+ specs = sorted(specs, key=lambda x: x.version)
+ return ",".join(str(s) for s in specs) or "<any>"
+
+
+def is_pinned_requirement(ireq):
+ """
+ Returns whether an InstallRequirement is a "pinned" requirement.
+
+ An InstallRequirement is considered pinned if:
+
+    - It is not editable
+ - It has exactly one specifier
+ - That specifier is "=="
+ - The version does not contain a wildcard
+
+ Examples:
+ django==1.8 # pinned
+ django>1.8 # NOT pinned
+ django~=1.8 # NOT pinned
+ django==1.* # NOT pinned
+ """
+ if ireq.editable:
+ return False
+
+ if ireq.req is None or len(ireq.specifier) != 1:
+ return False
+
+ spec = next(iter(ireq.specifier))
+ return spec.operator in {"==", "==="} and not spec.version.endswith(".*")
+
+
+def as_tuple(ireq):
+ """
+    Pulls out the (name: str, version: str, extras: Tuple[str, ...]) tuple
+    from the pinned InstallRequirement.
+ """
+ if not is_pinned_requirement(ireq):
+ raise TypeError("Expected a pinned InstallRequirement, got {}".format(ireq))
+
+ name = key_from_ireq(ireq)
+ version = next(iter(ireq.specifier)).version
+ extras = tuple(sorted(ireq.extras))
+ return name, version, extras
+
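A sketch tying the pin helpers together (the requirement string is invented):

```python
from pip._internal.req.constructors import install_req_from_line
from piptools.utils import as_tuple, is_pinned_requirement, key_from_ireq

ireq = install_req_from_line("Django[bcrypt]==1.8")
key_from_ireq(ireq)          # 'django'
is_pinned_requirement(ireq)  # True
as_tuple(ireq)               # ('django', '1.8', ('bcrypt',))
```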
+
+def flat_map(fn, collection):
+ """Map a function over a collection and flatten the result by one-level"""
+ return chain.from_iterable(map(fn, collection))
+
+
+def lookup_table(values, key=None, keyval=None, unique=False, use_lists=False):
+ """
+ Builds a dict-based lookup table (index) elegantly.
+
+ Supports building normal and unique lookup tables. For example:
+
+ >>> assert lookup_table(
+ ... ['foo', 'bar', 'baz', 'qux', 'quux'], lambda s: s[0]) == {
+ ... 'b': {'bar', 'baz'},
+ ... 'f': {'foo'},
+ ... 'q': {'quux', 'qux'}
+ ... }
+
+ For key functions that uniquely identify values, set unique=True:
+
+ >>> assert lookup_table(
+ ... ['foo', 'bar', 'baz', 'qux', 'quux'], lambda s: s[0],
+ ... unique=True) == {
+ ... 'b': 'baz',
+ ... 'f': 'foo',
+ ... 'q': 'quux'
+ ... }
+
+    To have the values represented as lists, set use_lists=True:
+
+ >>> assert lookup_table(
+ ... ['foo', 'bar', 'baz', 'qux', 'quux'], lambda s: s[0],
+ ... use_lists=True) == {
+ ... 'b': ['bar', 'baz'],
+ ... 'f': ['foo'],
+ ... 'q': ['qux', 'quux']
+ ... }
+
+ The values of the resulting lookup table will be lists, not sets.
+
+ For extra power, you can even change the values while building up the LUT.
+ To do so, use the `keyval` function instead of the `key` arg:
+
+ >>> assert lookup_table(
+ ... ['foo', 'bar', 'baz', 'qux', 'quux'],
+ ... keyval=lambda s: (s[0], s[1:])) == {
+ ... 'b': {'ar', 'az'},
+ ... 'f': {'oo'},
+ ... 'q': {'uux', 'ux'}
+ ... }
+
+ """
+ if keyval is None:
+ if key is None:
+
+ def keyval(v):
+ return v
+
+ else:
+
+ def keyval(v):
+ return (key(v), v)
+
+ if unique:
+ return dict(keyval(v) for v in values)
+
+ lut = {}
+ for value in values:
+ k, v = keyval(value)
+ try:
+ s = lut[k]
+ except KeyError:
+ if use_lists:
+ s = lut[k] = list()
+ else:
+ s = lut[k] = set()
+ if use_lists:
+ s.append(v)
+ else:
+ s.add(v)
+ return dict(lut)
+
+
+def dedup(iterable):
+ """Deduplicate an iterable object like iter(set(iterable)) but
+    order-preserving.
+ """
+ return iter(OrderedDict.fromkeys(iterable))
+
+
+def name_from_req(req):
+ """Get the name of the requirement"""
+ if hasattr(req, "project_name"):
+ # from pkg_resources, such as installed dists for pip-sync
+ return req.project_name
+ else:
+ # from packaging, such as install requirements from requirements.txt
+ return req.name
+
+
+def fs_str(string):
+ """
+ Convert given string to a correctly encoded filesystem string.
+
+ On Python 2, if the input string is unicode, converts it to bytes
+ encoded with the filesystem encoding.
+
+ On Python 3 returns the string as is, since Python 3 uses unicode
+ paths and the input string shouldn't be bytes.
+
+ :type string: str|unicode
+ :rtype: str
+ """
+ if isinstance(string, str):
+ return string
+ if isinstance(string, bytes):
+ raise TypeError("fs_str() argument must not be bytes")
+ return string.encode(_fs_encoding)
+
+
+_fs_encoding = sys.getfilesystemencoding() or sys.getdefaultencoding()
+
+
+def get_hashes_from_ireq(ireq):
+ """
+ Given an InstallRequirement, return a list of string hashes in
+ the format "{algorithm}:{hash}". Return an empty list if there are no hashes
+ in the requirement options.
+ """
+ result = []
+ for algorithm, hexdigests in ireq.hash_options.items():
+ for hash_ in hexdigests:
+ result.append("{}:{}".format(algorithm, hash_))
+ return result
+
+
+def force_text(s):
+ """
+ Return a string representing `s`.
+ """
+ if s is None:
+ return ""
+ if not isinstance(s, six.string_types):
+ return six.text_type(s)
+ return s
+
+
+def get_compile_command(click_ctx):
+ """
+    Returns a normalized compile command depending on the CLI context.
+
+ The command will be normalized by:
+    - expanding short options to their long forms
+ - removing values that are already default
+ - sorting the arguments
+ - removing one-off arguments like '--upgrade'
+ - removing arguments that don't change build behaviour like '--verbose'
+ """
+ from piptools.scripts.compile import cli
+
+ # Map of the compile cli options (option name -> click.Option)
+ compile_options = {option.name: option for option in cli.params}
+
+ left_args = []
+ right_args = []
+
+ for option_name, value in click_ctx.params.items():
+ option = compile_options[option_name]
+
+ # Collect variadic args separately; they will be added
+ # at the end of the command later
+ if option.nargs < 0:
+ # These will necessarily be src_files
+ # Re-add click-stripped '--' if any start with '-'
+ if any(val.startswith("-") and val != "-" for val in value):
+ right_args.append("--")
+ right_args.extend([shlex_quote(force_text(val)) for val in value])
+ continue
+
+ # Get the last option name (usually the long form)
+ option_long_name = option.opts[-1]
+
+ # Exclude one-off options (--upgrade/--upgrade-package/--rebuild/...)
+ # or options that don't change compile behaviour (--verbose/--dry-run/...)
+ if option_long_name in COMPILE_EXCLUDE_OPTIONS:
+ continue
+
+ # Skip options without a value
+ if option.default is None and not value:
+ continue
+
+ # Skip options with a default value
+ if option.default == value:
+ continue
+
+ # Use a file name for file-like objects
+ if isinstance(value, LazyFile):
+ value = value.name
+
+ # Convert the value to a list
+ if not isinstance(value, (tuple, list)):
+ value = [value]
+
+ for val in value:
+ # Flags don't have a value, so append the true- or false-option long name
+ if option.is_flag:
+ # If there are false-options, choose the option name based on the value
+ if option.secondary_opts:
+ # Get the last false-option
+ secondary_option_long_name = option.secondary_opts[-1]
+ arg = option_long_name if val else secondary_option_long_name
+ # There are no false-options, so use the true-option
+ else:
+ arg = option_long_name
+ left_args.append(shlex_quote(arg))
+ # Otherwise, append the option together with its value
+ else:
+ if isinstance(val, six.string_types) and is_url(val):
+ val = redact_auth_from_url(val)
+ if option.name == "pip_args":
+ # shlex_quote would produce functional but noisily quoted results,
+ # e.g. --pip-args='--cache-dir='"'"'/tmp/with spaces'"'"''
+ # Instead, we try to get more legible quoting via repr:
+ left_args.append(
+ "{option}={value}".format(
+ option=option_long_name, value=repr(fs_str(force_text(val)))
+ )
+ )
+ else:
+ left_args.append(
+ "{option}={value}".format(
+ option=option_long_name, value=shlex_quote(force_text(val))
+ )
+ )
+
+ return " ".join(["pip-compile"] + sorted(left_args) + sorted(right_args))
diff --git a/third_party/python/pip_tools/piptools/writer.py b/third_party/python/pip_tools/piptools/writer.py
new file mode 100644
index 0000000000..515df198eb
--- /dev/null
+++ b/third_party/python/pip_tools/piptools/writer.py
@@ -0,0 +1,243 @@
+from __future__ import unicode_literals
+
+import os
+import re
+from itertools import chain
+
+from pip._vendor import six
+
+from .click import unstyle
+from .logging import log
+from .utils import (
+ UNSAFE_PACKAGES,
+ comment,
+ dedup,
+ format_requirement,
+ get_compile_command,
+ key_from_ireq,
+)
+
+MESSAGE_UNHASHED_PACKAGE = comment(
+ "# WARNING: pip install will require the following package to be hashed."
+ "\n# Consider using a hashable URL like "
+ "https://github.com/jazzband/pip-tools/archive/SOMECOMMIT.zip"
+)
+
+MESSAGE_UNSAFE_PACKAGES_UNPINNED = comment(
+ "# WARNING: The following packages were not pinned, but pip requires them to be"
+ "\n# pinned when the requirements file includes hashes. "
+ "Consider using the --allow-unsafe flag."
+)
+
+MESSAGE_UNSAFE_PACKAGES = comment(
+ "# The following packages are considered to be unsafe in a requirements file:"
+)
+
+MESSAGE_UNINSTALLABLE = (
+ "The generated requirements file may be rejected by pip install. "
+ "See # WARNING lines for details."
+)
+
+
+strip_comes_from_line_re = re.compile(r" \(line \d+\)$")
+
+
+def _comes_from_as_string(ireq):
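+ # comes_from is either a string such as "-r requirements.in (line 3)",
+ # from which the " (line N)" suffix is stripped, or a parent
+ # InstallRequirement, which is reduced to its canonical key.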
+ if isinstance(ireq.comes_from, six.string_types):
+ return strip_comes_from_line_re.sub("", ireq.comes_from)
+ return key_from_ireq(ireq.comes_from)
+
+
+class OutputWriter(object):
+ def __init__(
+ self,
+ src_files,
+ dst_file,
+ click_ctx,
+ dry_run,
+ emit_header,
+ emit_index_url,
+ emit_trusted_host,
+ annotate,
+ generate_hashes,
+ default_index_url,
+ index_urls,
+ trusted_hosts,
+ format_control,
+ allow_unsafe,
+ find_links,
+ emit_find_links,
+ ):
+ self.src_files = src_files
+ self.dst_file = dst_file
+ self.click_ctx = click_ctx
+ self.dry_run = dry_run
+ self.emit_header = emit_header
+ self.emit_index_url = emit_index_url
+ self.emit_trusted_host = emit_trusted_host
+ self.annotate = annotate
+ self.generate_hashes = generate_hashes
+ self.default_index_url = default_index_url
+ self.index_urls = index_urls
+ self.trusted_hosts = trusted_hosts
+ self.format_control = format_control
+ self.allow_unsafe = allow_unsafe
+ self.find_links = find_links
+ self.emit_find_links = emit_find_links
+
+ def _sort_key(self, ireq):
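+ # Editable requirements sort first, then case-insensitive requirement order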
+ return (not ireq.editable, str(ireq.req).lower())
+
+ def write_header(self):
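+ # Yields a banner like the following (compile command is illustrative):
+ #   #
+ #   # This file is autogenerated by pip-compile
+ #   # To update, run:
+ #   #
+ #   # pip-compile requirements.in
+ #   #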
+ if self.emit_header:
+ yield comment("#")
+ yield comment("# This file is autogenerated by pip-compile")
+ yield comment("# To update, run:")
+ yield comment("#")
+ compile_command = os.environ.get(
+ "CUSTOM_COMPILE_COMMAND"
+ ) or get_compile_command(self.click_ctx)
+ yield comment("# {}".format(compile_command))
+ yield comment("#")
+
+ def write_index_options(self):
+ if self.emit_index_url:
+ for index, index_url in enumerate(dedup(self.index_urls)):
+ if index_url.rstrip("/") == self.default_index_url:
+ continue
+ flag = "--index-url" if index == 0 else "--extra-index-url"
+ yield "{} {}".format(flag, index_url)
+
+ def write_trusted_hosts(self):
+ if self.emit_trusted_host:
+ for trusted_host in dedup(self.trusted_hosts):
+ yield "--trusted-host {}".format(trusted_host)
+
+ def write_format_controls(self):
+ for nb in dedup(sorted(self.format_control.no_binary)):
+ yield "--no-binary {}".format(nb)
+ for ob in dedup(sorted(self.format_control.only_binary)):
+ yield "--only-binary {}".format(ob)
+
+ def write_find_links(self):
+ if self.emit_find_links:
+ for find_link in dedup(self.find_links):
+ yield "--find-links {}".format(find_link)
+
+ def write_flags(self):
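+ # Chain all flag emitters; if any of them produced a line, add a
+ # single blank separator line at the end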
+ emitted = False
+ for line in chain(
+ self.write_index_options(),
+ self.write_find_links(),
+ self.write_trusted_hosts(),
+ self.write_format_controls(),
+ ):
+ emitted = True
+ yield line
+ if emitted:
+ yield ""
+
+ def _iter_lines(self, results, unsafe_requirements=None, markers=None, hashes=None):
+ # default values
+ unsafe_requirements = unsafe_requirements or []
+ markers = markers or {}
+ hashes = hashes or {}
+
+ # Check for unhashed or unpinned packages if at least one package does have
+ # hashes, which will trigger pip install's --require-hashes mode.
+ warn_uninstallable = False
+ has_hashes = hashes and any(hash for hash in hashes.values())
+
+ yielded = False
+
+ for line in self.write_header():
+ yield line
+ yielded = True
+ for line in self.write_flags():
+ yield line
+ yielded = True
+
+ unsafe_requirements = (
+ {r for r in results if r.name in UNSAFE_PACKAGES}
+ if not unsafe_requirements
+ else unsafe_requirements
+ )
+ packages = {r for r in results if r.name not in UNSAFE_PACKAGES}
+
+ if packages:
+ packages = sorted(packages, key=self._sort_key)
+ for ireq in packages:
+ if has_hashes and not hashes.get(ireq):
+ yield MESSAGE_UNHASHED_PACKAGE
+ warn_uninstallable = True
+ line = self._format_requirement(
+ ireq, markers.get(key_from_ireq(ireq)), hashes=hashes
+ )
+ yield line
+ yielded = True
+
+ if unsafe_requirements:
+ unsafe_requirements = sorted(unsafe_requirements, key=self._sort_key)
+ yield ""
+ yielded = True
+ if has_hashes and not self.allow_unsafe:
+ yield MESSAGE_UNSAFE_PACKAGES_UNPINNED
+ warn_uninstallable = True
+ else:
+ yield MESSAGE_UNSAFE_PACKAGES
+
+ for ireq in unsafe_requirements:
+ ireq_key = key_from_ireq(ireq)
+ if not self.allow_unsafe:
+ yield comment("# {}".format(ireq_key))
+ else:
+ line = self._format_requirement(
+ ireq, marker=markers.get(ireq_key), hashes=hashes
+ )
+ yield line
+
+ # Yield even when there's no real content, so that blank files are written
+ if not yielded:
+ yield ""
+
+ if warn_uninstallable:
+ log.warning(MESSAGE_UNINSTALLABLE)
+
+ def write(self, results, unsafe_requirements, markers, hashes):
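+ # Log every line; unless this is a dry run, also write it to dst_file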
+ for line in self._iter_lines(results, unsafe_requirements, markers, hashes):
+ log.info(line)
+ if not self.dry_run:
+ self.dst_file.write(unstyle(line).encode("utf-8"))
+ self.dst_file.write(os.linesep.encode("utf-8"))
+
+ def _format_requirement(self, ireq, marker=None, hashes=None):
+ ireq_hashes = (hashes if hashes is not None else {}).get(ireq)
+
+ line = format_requirement(ireq, marker=marker, hashes=ireq_hashes)
+
+ if not self.annotate:
+ return line
+
+ # Annotate which packages or requirements files (-r ...) this package is required by
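+ # For example (hypothetical names), a single source renders as:
+ #   six==1.15.0
+ #       # via pip-tools
+ # and multiple sources as:
+ #   six==1.15.0
+ #       # via
+ #       #   pip-tools
+ #       #   mock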
+ required_by = set()
+ if hasattr(ireq, "_source_ireqs"):
+ required_by |= {
+ _comes_from_as_string(src_ireq)
+ for src_ireq in ireq._source_ireqs
+ if src_ireq.comes_from
+ }
+ elif ireq.comes_from:
+ required_by.add(_comes_from_as_string(ireq))
+ if required_by:
+ required_by = sorted(required_by)
+ if len(required_by) == 1:
+ source = required_by[0]
+ annotation = " # via " + source
+ else:
+ annotation_lines = [" # via"]
+ for source in required_by:
+ annotation_lines.append(" # " + source)
+ annotation = "\n".join(annotation_lines)
+ line = "{}\n{}".format(line, comment(annotation))
+ return line