Diffstat (limited to 'third_party/python/pip-tools')
-rw-r--r--  third_party/python/pip-tools/.appveyor.yml | 52
-rw-r--r--  third_party/python/pip-tools/.bandit | 2
-rw-r--r--  third_party/python/pip-tools/.coveragerc | 8
-rw-r--r--  third_party/python/pip-tools/.fussyfox.yml | 3
-rw-r--r--  third_party/python/pip-tools/.github/ISSUE_TEMPLATE/bug-report.md | 28
-rw-r--r--  third_party/python/pip-tools/.github/ISSUE_TEMPLATE/feature-request.md | 19
-rw-r--r--  third_party/python/pip-tools/.github/PULL_REQUEST_TEMPLATE.md | 9
-rw-r--r--  third_party/python/pip-tools/.github/workflows/ci.yml | 66
-rw-r--r--  third_party/python/pip-tools/.github/workflows/cron.yml | 73
-rw-r--r--  third_party/python/pip-tools/.github/workflows/qa.yml | 43
-rw-r--r--  third_party/python/pip-tools/.gitignore | 31
-rw-r--r--  third_party/python/pip-tools/.pre-commit-config.yaml | 24
-rw-r--r--  third_party/python/pip-tools/.pre-commit-hooks.yaml | 7
-rw-r--r--  third_party/python/pip-tools/.travis.yml | 72
-rw-r--r--  third_party/python/pip-tools/CHANGELOG.md | 607
-rw-r--r--  third_party/python/pip-tools/CONTRIBUTING.md | 49
-rw-r--r--  third_party/python/pip-tools/LICENSE | 26
-rw-r--r--  third_party/python/pip-tools/PKG-INFO | 495
-rw-r--r--  third_party/python/pip-tools/README.rst | 466
-rw-r--r--  third_party/python/pip-tools/examples/django.in | 3
-rw-r--r--  third_party/python/pip-tools/examples/flask.in | 2
-rw-r--r--  third_party/python/pip-tools/examples/hypothesis.in | 1
-rw-r--r--  third_party/python/pip-tools/examples/protection.in | 3
-rw-r--r--  third_party/python/pip-tools/examples/sentry.in | 2
-rw-r--r--  third_party/python/pip-tools/img/pip-tools-overview.png | bin 0 -> 23961 bytes
-rw-r--r--  third_party/python/pip-tools/piptools/__init__.py | 11
-rw-r--r--  third_party/python/pip-tools/piptools/__main__.py | 17
-rw-r--r--  third_party/python/pip-tools/piptools/_compat/__init__.py | 12
-rw-r--r--  third_party/python/pip-tools/piptools/_compat/contextlib.py | 18
-rw-r--r--  third_party/python/pip-tools/piptools/_compat/pip_compat.py | 29
-rw-r--r--  third_party/python/pip-tools/piptools/_compat/tempfile.py | 88
-rw-r--r--  third_party/python/pip-tools/piptools/cache.py | 170
-rw-r--r--  third_party/python/pip-tools/piptools/click.py | 6
-rw-r--r--  third_party/python/pip-tools/piptools/exceptions.py | 66
-rw-r--r--  third_party/python/pip-tools/piptools/locations.py | 25
-rw-r--r--  third_party/python/pip-tools/piptools/logging.py | 62
-rw-r--r--  third_party/python/pip-tools/piptools/repositories/__init__.py | 3
-rw-r--r--  third_party/python/pip-tools/piptools/repositories/base.py | 55
-rw-r--r--  third_party/python/pip-tools/piptools/repositories/local.py | 99
-rw-r--r--  third_party/python/pip-tools/piptools/repositories/pypi.py | 524
-rw-r--r--  third_party/python/pip-tools/piptools/resolver.py | 408
-rw-r--r--  third_party/python/pip-tools/piptools/scripts/__init__.py | 0
-rwxr-xr-x  third_party/python/pip-tools/piptools/scripts/compile.py | 501
-rwxr-xr-x  third_party/python/pip-tools/piptools/scripts/sync.py | 217
-rw-r--r--  third_party/python/pip-tools/piptools/sync.py | 223
-rw-r--r--  third_party/python/pip-tools/piptools/utils.py | 388
-rw-r--r--  third_party/python/pip-tools/piptools/writer.py | 239
-rw-r--r--  third_party/python/pip-tools/setup.cfg | 85
-rw-r--r--  third_party/python/pip-tools/setup.py | 3
-rw-r--r--  third_party/python/pip-tools/tox.ini | 52
50 files changed, 5392 insertions, 0 deletions
diff --git a/third_party/python/pip-tools/.appveyor.yml b/third_party/python/pip-tools/.appveyor.yml
new file mode 100644
index 0000000000..5df681cda4
--- /dev/null
+++ b/third_party/python/pip-tools/.appveyor.yml
@@ -0,0 +1,52 @@
+environment:
+ global:
+ PYTHON: "C:\\Python36"
+
+ matrix:
+ - TOXENV: py27-pip20.0-coverage
+ PIP: 20.0
+ - TOXENV: py27-piplatest-coverage
+ PIP: latest
+
+ - TOXENV: py35-pip20.0
+ PIP: 20.0
+ - TOXENV: py35-piplatest
+ PIP: latest
+
+ - TOXENV: py36-pip20.0
+ PIP: 20.0
+ - TOXENV: py36-piplatest
+ PIP: latest
+
+ - TOXENV: py37-pip20.0
+ PIP: 20.0
+ - TOXENV: py37-piplatest
+ PIP: latest
+
+ - TOXENV: py38-pip20.0-coverage
+ PIP: 20.0
+ - TOXENV: py38-piplatest-coverage
+ PIP: latest
+
+matrix:
+ fast_finish: true
+ allow_failures:
+ - PIP: master
+
+install:
+ - "SET PATH=%PYTHON%;%PYTHON%\\Scripts;%PATH%"
+ - python -m pip install -U tox virtualenv
+
+build: false
+
+test_script:
+ - tox
+
+after_test:
+ # Add tox environment to PATH.
+ - "SET PATH=%CD%\\.tox\\%TOXENV%\\scripts;%PATH%"
+ - IF NOT "x%TOXENV:-coverage=%"=="x%TOXENV%" (
+ pip install codecov &&
+ coverage xml &&
+ appveyor-retry codecov --required -X gcov pycov search -f coverage.xml -n %TOXENV%-windows
+ )
diff --git a/third_party/python/pip-tools/.bandit b/third_party/python/pip-tools/.bandit
new file mode 100644
index 0000000000..b9d346ecac
--- /dev/null
+++ b/third_party/python/pip-tools/.bandit
@@ -0,0 +1,2 @@
+[bandit]
+exclude: tests,.tox,.eggs,.venv,.git
diff --git a/third_party/python/pip-tools/.coveragerc b/third_party/python/pip-tools/.coveragerc
new file mode 100644
index 0000000000..26c6762a74
--- /dev/null
+++ b/third_party/python/pip-tools/.coveragerc
@@ -0,0 +1,8 @@
+[run]
+branch = True
+source = .
+omit =
+ piptools/_compat/*
+
+[report]
+include = piptools/*, tests/*
diff --git a/third_party/python/pip-tools/.fussyfox.yml b/third_party/python/pip-tools/.fussyfox.yml
new file mode 100644
index 0000000000..c488ca3525
--- /dev/null
+++ b/third_party/python/pip-tools/.fussyfox.yml
@@ -0,0 +1,3 @@
+- bandit
+- isort
+- flake8
diff --git a/third_party/python/pip-tools/.github/ISSUE_TEMPLATE/bug-report.md b/third_party/python/pip-tools/.github/ISSUE_TEMPLATE/bug-report.md
new file mode 100644
index 0000000000..d8be0c5674
--- /dev/null
+++ b/third_party/python/pip-tools/.github/ISSUE_TEMPLATE/bug-report.md
@@ -0,0 +1,28 @@
+---
+name: Bug report
+about: Create a report to help us improve
+
+---
+
+<!-- Describe the issue briefly here. -->
+
+#### Environment Versions
+
+1. OS Type
+1. Python version: `$ python -V`
+1. pip version: `$ pip --version`
+1. pip-tools version: `$ pip-compile --version`
+
+#### Steps to replicate
+
+1. ...
+2. ...
+3. ...
+
+#### Expected result
+
+...
+
+#### Actual result
+
+...
diff --git a/third_party/python/pip-tools/.github/ISSUE_TEMPLATE/feature-request.md b/third_party/python/pip-tools/.github/ISSUE_TEMPLATE/feature-request.md
new file mode 100644
index 0000000000..e2c7c5b4ed
--- /dev/null
+++ b/third_party/python/pip-tools/.github/ISSUE_TEMPLATE/feature-request.md
@@ -0,0 +1,19 @@
+---
+name: Feature request
+about: Suggest an idea for this project
+
+---
+
+#### What's the problem this feature will solve?
+<!-- What are you trying to do, that you are unable to achieve with pip-tools as it currently stands? -->
+
+#### Describe the solution you'd like
+<!-- A clear and concise description of what you want to happen. -->
+
+<!-- Provide examples of real-world use cases that this would enable and how it solves the problem described above. -->
+
+#### Alternative Solutions
+<!-- Have you tried to work around the problem using pip-tools or other tools? Or a different approach to solving this issue? Please elaborate here. -->
+
+#### Additional context
+<!-- Add any other context, links, etc. about the feature here. -->
diff --git a/third_party/python/pip-tools/.github/PULL_REQUEST_TEMPLATE.md b/third_party/python/pip-tools/.github/PULL_REQUEST_TEMPLATE.md
new file mode 100644
index 0000000000..430ad24772
--- /dev/null
+++ b/third_party/python/pip-tools/.github/PULL_REQUEST_TEMPLATE.md
@@ -0,0 +1,9 @@
+<!--- Describe the changes here. --->
+
+**Changelog-friendly one-liner**: <!-- One-liner description here -->
+
+##### Contributor checklist
+
+- [ ] Provided tests for the changes.
+- [ ] Gave a clear one-line description in the PR (that the maintainers can add to CHANGELOG.md on release).
+- [ ] Assigned the PR to an existing or new milestone for the target version (following [Semantic Versioning](https://blog.versioneye.com/2014/01/16/semantic-versioning/)).
diff --git a/third_party/python/pip-tools/.github/workflows/ci.yml b/third_party/python/pip-tools/.github/workflows/ci.yml
new file mode 100644
index 0000000000..b5da29dde1
--- /dev/null
+++ b/third_party/python/pip-tools/.github/workflows/ci.yml
@@ -0,0 +1,66 @@
+name: CI
+
+on:
+ pull_request:
+ push:
+ branches:
+ - master
+ tags:
+ schedule:
+ # Run every day at 03:53 UTC
+ - cron: 53 3 * * *
+
+jobs:
+ test:
+ name: ${{ matrix.os }} / ${{ matrix.python-version }} / ${{ matrix.pip-version }}
+ runs-on: ${{ matrix.os }}-latest
+ strategy:
+ matrix:
+ os:
+ - Ubuntu
+ - Windows
+ - macOS
+ python-version:
+ - 3.8
+ - 2.7
+ - 3.5
+ - 3.6
+ - 3.7
+ pip-version:
+ - "latest"
+ - "20.2" # TODO: update to 20.1 once pip 20.2 is released
+ - "20.0"
+ include:
+ - os: Ubuntu
+ python-version: 3.9-dev
+ pip-version: latest
+
+ env:
+ PY_COLORS: 1
+ TOXENV: pip${{ matrix.pip-version }}-coverage
+ TOX_PARALLEL_NO_SPINNER: 1
+ steps:
+ - uses: actions/checkout@master
+ - name: Set up Python ${{ matrix.python-version }} from GitHub
+ if: "!endsWith(matrix.python-version, '-dev')"
+ uses: actions/setup-python@v2
+ with:
+ python-version: ${{ matrix.python-version }}
+ - name: Set up Python ${{ matrix.python-version }} from deadsnakes
+ if: endsWith(matrix.python-version, '-dev')
+ uses: deadsnakes/action@v1.0.0
+ with:
+ python-version: ${{ matrix.python-version }}
+ - name: Log python version info (${{ matrix.python-version }})
+ run: python --version --version
+ - name: Install test dependencies
+ run: python -m pip install -U tox virtualenv
+ - name: Prepare test environment
+ run: tox --notest -p auto --parallel-live
+ - name: Test pip ${{ matrix.pip-version }}
+ run: tox
+ - name: Upload coverage to Codecov
+ uses: codecov/codecov-action@v1.0.6
+ with:
+ file: ./coverage.xml
+ name: ${{ runner.os }}-${{ matrix.python-version }}-${{ matrix.pip-version }}
diff --git a/third_party/python/pip-tools/.github/workflows/cron.yml b/third_party/python/pip-tools/.github/workflows/cron.yml
new file mode 100644
index 0000000000..149825a1d2
--- /dev/null
+++ b/third_party/python/pip-tools/.github/workflows/cron.yml
@@ -0,0 +1,73 @@
+name: Cron
+
+on:
+ schedule:
+ # Run every day at 00:00 UTC
+ - cron: 0 0 * * *
+
+jobs:
+ master:
+ name: ${{ matrix.os }} / ${{ matrix.python-version }} / ${{ matrix.pip-version }}
+ runs-on: ${{ matrix.os }}-latest
+ strategy:
+ matrix:
+ os:
+ - Ubuntu
+ - Windows
+ - MacOS
+ python-version:
+ - 3.8
+ - 2.7
+ - 3.5
+ - 3.6
+ - 3.7
+ pip-version:
+ - master
+ env:
+ PY_COLORS: 1
+ TOXENV: pip${{ matrix.pip-version }}
+ TOX_PARALLEL_NO_SPINNER: 1
+ steps:
+ - uses: actions/checkout@master
+ - name: Set up Python ${{ matrix.python-version }}
+ uses: actions/setup-python@v2
+ with:
+ python-version: ${{ matrix.python-version }}
+ - name: Install test dependencies
+ run: python -m pip install -U tox virtualenv
+ - name: Prepare test environment
+ run: tox --notest -p auto --parallel-live
+ - name: Test pip ${{ matrix.pip-version }}
+ run: tox
+
+ pypy:
+ name: ${{ matrix.os }} / ${{ matrix.python-version }} / ${{ matrix.pip-version }}
+ runs-on: ${{ matrix.os }}-latest
+ strategy:
+ matrix:
+ os:
+ - Ubuntu
+ - MacOS
+ # TODO: fix test_realistic_complex_sub_dependencies test on Windows
+ # - Windows
+ python-version:
+ - pypy3
+ - pypy2
+ pip-version:
+ - latest
+ env:
+ PY_COLORS: 1
+ TOXENV: pip${{ matrix.pip-version }}
+ TOX_PARALLEL_NO_SPINNER: 1
+ steps:
+ - uses: actions/checkout@master
+ - name: Set up Python ${{ matrix.python-version }}
+ uses: actions/setup-python@v2
+ with:
+ python-version: ${{ matrix.python-version }}
+ - name: Install tox
+ run: pip install tox
+ - name: Prepare test environment
+ run: tox --notest -p auto --parallel-live
+ - name: Test pip ${{ matrix.pip-version }}
+ run: tox
diff --git a/third_party/python/pip-tools/.github/workflows/qa.yml b/third_party/python/pip-tools/.github/workflows/qa.yml
new file mode 100644
index 0000000000..2c5d6f5c59
--- /dev/null
+++ b/third_party/python/pip-tools/.github/workflows/qa.yml
@@ -0,0 +1,43 @@
+name: QA
+
+on:
+ pull_request:
+ push:
+ branches:
+ - master
+ tags:
+
+jobs:
+ qa:
+ name: ${{ matrix.toxenv }}
+ runs-on: ubuntu-latest
+ strategy:
+ matrix:
+ toxenv:
+ - checkqa
+ - readme
+ python-version:
+ - "3.x"
+ env:
+ PY_COLORS: 1
+ TOXENV: ${{ matrix.toxenv }}
+ TOX_PARALLEL_NO_SPINNER: 1
+ steps:
+ - uses: actions/checkout@master
+ - name: Set up Python ${{ matrix.python-version }}
+ uses: actions/setup-python@v2
+ with:
+ python-version: ${{ matrix.python-version }}
+ - name: Prepare cache key
+ id: cache-key
+ run: echo "::set-output name=sha-256::$(python -VV | sha256sum | cut -d' ' -f1)"
+ - uses: actions/cache@v1
+ with:
+ path: ~/.cache/pre-commit
+ key: pre-commit|${{ steps.cache-key.outputs.sha-256 }}|${{ hashFiles('.pre-commit-config.yaml') }}
+ - name: Install tox
+ run: pip install tox
+ - name: Prepare test environment
+ run: tox --notest -p auto --parallel-live
+ - name: Test ${{ matrix.toxenv }}
+ run: tox
diff --git a/third_party/python/pip-tools/.gitignore b/third_party/python/pip-tools/.gitignore
new file mode 100644
index 0000000000..bc17d7d80b
--- /dev/null
+++ b/third_party/python/pip-tools/.gitignore
@@ -0,0 +1,31 @@
+# Ignore cram test output
+*.t.err
+
+# Python cruft
+*.pyc
+
+# Virtualenvs
+.envrc
+.direnv
+.venv
+venv/
+
+# Testing
+.pytest_cache/
+.tox
+htmlcov
+
+# Build output
+build
+dist
+*.egg-info
+.coverage
+.cache
+
+# IDE
+.idea
+
+# Test files
+requirements.in
+requirements.txt
+.eggs/
diff --git a/third_party/python/pip-tools/.pre-commit-config.yaml b/third_party/python/pip-tools/.pre-commit-config.yaml
new file mode 100644
index 0000000000..cb06e17f07
--- /dev/null
+++ b/third_party/python/pip-tools/.pre-commit-config.yaml
@@ -0,0 +1,24 @@
+repos:
+ - repo: https://github.com/psf/black
+ rev: 19.10b0
+ hooks:
+ - id: black
+ language_version: python3
+ - repo: https://github.com/pre-commit/mirrors-isort
+ rev: v4.3.21
+ hooks:
+ - id: isort
+ language_version: python3
+ - repo: https://gitlab.com/pycqa/flake8
+ rev: 3.8.1
+ hooks:
+ - id: flake8
+ language_version: python3
+ additional_dependencies:
+ - flake8-pytest-style
+ - repo: https://github.com/PyCQA/bandit
+ rev: 1.6.2
+ hooks:
+ - id: bandit
+ language_version: python3
+ exclude: ^tests/
diff --git a/third_party/python/pip-tools/.pre-commit-hooks.yaml b/third_party/python/pip-tools/.pre-commit-hooks.yaml
new file mode 100644
index 0000000000..c70f0382ff
--- /dev/null
+++ b/third_party/python/pip-tools/.pre-commit-hooks.yaml
@@ -0,0 +1,7 @@
+- id: pip-compile
+ name: pip-compile
+ description: Automatically compile requirements.
+ entry: pip-compile
+ language: python
+ files: ^requirements\.(in|txt)$
+ pass_filenames: false
diff --git a/third_party/python/pip-tools/.travis.yml b/third_party/python/pip-tools/.travis.yml
new file mode 100644
index 0000000000..7ed071a83c
--- /dev/null
+++ b/third_party/python/pip-tools/.travis.yml
@@ -0,0 +1,72 @@
+dist: xenial
+sudo: false
+language: python
+python:
+ - "2.7"
+ - "3.5"
+ - "3.6"
+ - "3.7"
+ - "3.8"
+
+env:
+ # NOTE: keep this in sync with envlist in tox.ini for tox-travis.
+ - PIP=latest
+ - PIP=20.2 # TODO: update to 20.1 once pip 20.2 is released
+ - PIP=20.0
+
+cache: false
+install:
+ - travis_retry python -m pip install -U tox-travis virtualenv
+script:
+ - tox
+
+stages:
+- test
+- name: deploy
+ if: repo = jazzband/pip-tools AND tag IS present
+
+jobs:
+ include:
+ # QA checks.
+ - env: TOXENV=checkqa
+ python: 3.7
+ after_success: skip # No coverage needed
+ cache:
+ directories:
+ - $HOME/.cache/pre-commit
+ - env: TOXENV=readme
+ python: 2.7
+ after_success: skip # No coverage needed
+
+ # Only test pypy/pypy3 with latest pip.
+ - env: PIP=latest
+ python: "pypy2.7-6.0"
+ - env: PIP=latest
+ python: "pypy3.5-6.0"
+
+ - stage: deploy
+ install: skip # No need to install tox-travis on deploy.
+ script: skip # No test on the deploy stage.
+ python: 2.7
+ env: skip # No special env required.
+ after_success: true # No coverage needed
+ deploy:
+ provider: pypi
+ user: jazzband
+ server: https://jazzband.co/projects/pip-tools/upload
+ distributions: sdist bdist_wheel
+ password:
+ secure: TCG9beQgarL/EDHiwSCgEf1JnofTroA5QRp2OTL3QC+eaar6FftqxcJQw3FwnHJ7NarI6E7bcxn9wDRs6tXqiLcyGOzWUnR4jQ94w/7YaLQjzLr8g1toRnb9dNwU1l14z2hmnzc4oRqu7+pi4wIpBx+11Ke9JXNcrC+cRFwCdLQ=
+ on:
+ tags: true
+ repo: jazzband/pip-tools
+ allow_failures:
+ - env: PIP=master
+
+after_success:
+ - travis_retry pip install codecov coveralls
+ - travis_retry codecov --required -n "py${TRAVIS_PYTHON_VERSION}-pip${PIP}-${TRAVIS_OS_NAME}"
+ - "COVERALLS_PARALLEL=true coveralls"
+
+notifications:
+ webhooks: https://coveralls.io/webhook
diff --git a/third_party/python/pip-tools/CHANGELOG.md b/third_party/python/pip-tools/CHANGELOG.md
new file mode 100644
index 0000000000..75b4960ebb
--- /dev/null
+++ b/third_party/python/pip-tools/CHANGELOG.md
@@ -0,0 +1,607 @@
+# 5.3.1 (2020-07-31)
+
+Bug Fixes:
+- Fix a `pip-20.2` compatibility issue that caused `pip-tools` to sometimes fail to
+stabilize in a constant number of rounds
+([1194](https://github.com/jazzband/pip-tools/pull/1194)). Thanks @vphilippon
+
+# 5.3.0 (2020-07-26)
+
+Features:
+- Add `-h` alias for `--help` option to `pip-sync` and `pip-compile`
+([1163](https://github.com/jazzband/pip-tools/pull/1163)). Thanks @jan25
+- Add `pip>=20.2` support
+([1168](https://github.com/jazzband/pip-tools/pull/1168)). Thanks @atugushev
+- `pip-sync` now exits with code `1` on `--dry-run`
+([1172](https://github.com/jazzband/pip-tools/pull/1172)). Thanks @francisbrito
+- `pip-compile` no longer resolves constraints from `-c constraints.txt` that are not
+(yet) requirements
+ ([1175](https://github.com/jazzband/pip-tools/pull/1175)). Thanks @clslgrnc
+- Add `--reuse-hashes/--no-reuse-hashes` options to `pip-compile`
+ ([1177](https://github.com/jazzband/pip-tools/pull/1177)). Thanks @graingert
+
+# 5.2.1 (2020-06-09)
+
+Bug Fixes:
+- Fix a bug where `pip-compile` would lose some dependencies when updating
+a `requirements.txt`
+([1159](https://github.com/jazzband/pip-tools/pull/1159)). Thanks @richafrank
+
+
+# 5.2.0 (2020-05-27)
+
+Features:
+- Show basename of URLs when `pip-compile` generates hashes in a verbose mode
+([1113](https://github.com/jazzband/pip-tools/pull/1113)). Thanks @atugushev
+- Add `--emit-index-url/--no-emit-index-url` options to `pip-compile`
+([1130](https://github.com/jazzband/pip-tools/pull/1130)). Thanks @atugushev
+
+Bug Fixes:
+- Fix a bug where `pip-compile` would ignore some package versions when
+`PIP_PREFER_BINARY` is set
+([1119](https://github.com/jazzband/pip-tools/pull/1119)). Thanks @atugushev
+- Fix leaked URLs with credentials in the debug output of `pip-compile`.
+([1146](https://github.com/jazzband/pip-tools/pull/1146)). Thanks @atugushev
+- Fix a bug where URL requirements would have name collisions
+([1149](https://github.com/jazzband/pip-tools/pull/1149)). Thanks @geokala
+
+Deprecations:
+- Deprecate `--index/--no-index` in favor of `--emit-index-url/--no-emit-index-url`
+options in `pip-compile`
+([1130](https://github.com/jazzband/pip-tools/pull/1130)). Thanks @atugushev
+
+Other Changes:
+
+- Switch to `setuptools` declarative syntax through `setup.cfg`
+([1141](https://github.com/jazzband/pip-tools/pull/1141)). Thanks @jdufresne
+
+# 5.1.2 (2020-05-05)
+
+Bug Fixes:
+- Fix grouping of editables and non-editables requirements
+([1132](https://github.com/jazzband/pip-tools/pull/1132)). Thanks @richafrank
+
+# 5.1.1 (2020-05-01)
+
+Bug Fixes:
+- Fix a bug where `pip-compile` would generate hashes for `*.egg` files
+([#1122](https://github.com/jazzband/pip-tools/pull/1122)). Thanks @atugushev
+
+# 5.1.0 (2020-04-27)
+
+Features:
+- Show progress bar when downloading packages in `pip-compile` verbose mode
+([#949](https://github.com/jazzband/pip-tools/pull/949)). Thanks @atugushev
+- `pip-compile` now gets hashes from the `PyPI` JSON API (if available), which significantly
+increases the speed of hash generation
+([#1109](https://github.com/jazzband/pip-tools/pull/1109)). Thanks @atugushev
+
+# 5.0.0 (2020-04-16)
+
+Backwards Incompatible Changes:
+- `pip-tools` now requires `pip>=20.0` (previously `8.1.x` - `20.0.x`). Windows users,
+ make sure to use `python -m pip install pip-tools` to avoid issues with `pip`
+ self-update from now on
+([#1055](https://github.com/jazzband/pip-tools/pull/1055)). Thanks @atugushev
+- The `--build-isolation` option is now enabled by default for `pip-compile`
+([#1060](https://github.com/jazzband/pip-tools/pull/1060)). Thanks @hramezani
+
+Features:
+- Exclude requirements with non-matching markers from `pip-sync`
+([#927](https://github.com/jazzband/pip-tools/pull/927)). Thanks @AndydeCleyre
+- Add `pre-commit` hook for `pip-compile`
+([#976](https://github.com/jazzband/pip-tools/pull/976)). Thanks @atugushev
+- `pip-compile` and `pip-sync` now pass anything provided to the new `--pip-args` option on to `pip`
+([#1080](https://github.com/jazzband/pip-tools/pull/1080)). Thanks @AndydeCleyre
+- `pip-compile` output headers are now more accurate when `--` is used to escape filenames
+([#1080](https://github.com/jazzband/pip-tools/pull/1080)). Thanks @AndydeCleyre
+- Add `pip>=20.1` support
+([#1088](https://github.com/jazzband/pip-tools/pull/1088)). Thanks @atugushev
+
+Bug Fixes:
+- Fix a bug where editables that are both direct requirements and constraints wouldn't appear in `pip-compile` output
+([#1093](https://github.com/jazzband/pip-tools/pull/1093)). Thanks @richafrank
+- `pip-compile` now sorts format controls (`--no-binary/--only-binary`) to ensure consistent results
+([#1098](https://github.com/jazzband/pip-tools/pull/1098)). Thanks @richafrank
+
+Improved Documentation:
+- Add cross-environment usage documentation to `README`
+([#651](https://github.com/jazzband/pip-tools/pull/651)). Thanks @vphilippon
+- Add versions compatibility table to `README`
+([#1106](https://github.com/jazzband/pip-tools/pull/1106)). Thanks @atugushev
+
+# 4.5.1 (2020-02-26)
+
+Bug Fixes:
+- Strip line number annotations such as "(line XX)" from file requirements, to prevent diff noise when modifying input requirement files
+([#1075](https://github.com/jazzband/pip-tools/pull/1075)). Thanks @adamchainz
+
+Improved Documentation:
+- Updated `README` example outputs for primary requirement annotations
+([#1072](https://github.com/jazzband/pip-tools/pull/1072)). Thanks @richafrank
+
+# 4.5.0 (2020-02-20)
+
+Features:
+- Primary requirements and VCS dependencies are now annotated with any source `.in` files and reverse dependencies
+([#1058](https://github.com/jazzband/pip-tools/pull/1058)). Thanks @AndydeCleyre
+
+Bug Fixes:
+- Always use normalized path for cache directory as it is required in newer versions of `pip`
+([#1062](https://github.com/jazzband/pip-tools/pull/1062)). Thanks @kammala
+
+Improved Documentation:
+- Replace outdated link in the `README` with rationale for pinning
+([#1053](https://github.com/jazzband/pip-tools/pull/1053)). Thanks @m-aciek
+
+# 4.4.1 (2020-01-31)
+
+Bug Fixes:
+- Fix a bug where `pip-compile` would keep outdated options from `requirements.txt`
+([#1029](https://github.com/jazzband/pip-tools/pull/1029)). Thanks @atugushev
+- Fix the `No handlers could be found for logger "pip.*"` error by configuring the builtin logging module
+([#1035](https://github.com/jazzband/pip-tools/pull/1035)). Thanks @vphilippon
+- Fix a bug where dependencies of relevant constraints may be missing from output file
+([#1037](https://github.com/jazzband/pip-tools/pull/1037)). Thanks @jeevb
+- Upgrade the minimum `click` version from `6.0` to `7.0` in `setup.py`
+([#1039](https://github.com/jazzband/pip-tools/pull/1039)). Thanks @hramezani
+- Ensure that depcache considers the python implementation such that (for example) `cpython3.6` does not poison the results of `pypy3.6`
+([#1050](https://github.com/jazzband/pip-tools/pull/1050)). Thanks @asottile
+
+Improved Documentation:
+- Make the `README` more imperative about installing into a project's virtual environment to avoid confusion
+([#1023](https://github.com/jazzband/pip-tools/pull/1023)). Thanks @tekumara
+- Add a note to the [Workflow for layered requirements](https://github.com/jazzband/pip-tools#workflow-for-layered-requirements) section of the `README` about how to install requirements for different stages
+([#1044](https://github.com/jazzband/pip-tools/pull/1044)). Thanks @hramezani
+
+# 4.4.0 (2020-01-21)
+
+Features:
+- Add `--cache-dir` option to `pip-compile`
+([#1022](https://github.com/jazzband/pip-tools/pull/1022)). Thanks @richafrank
+- Add `pip>=20.0` support
+([#1024](https://github.com/jazzband/pip-tools/pull/1024)). Thanks @atugushev
+
+Bug Fixes:
+- Fix a bug where `pip-compile --upgrade-package` would upgrade passed packages even when they were not already required according to the `*.in` and `*.txt` files
+([#1031](https://github.com/jazzband/pip-tools/pull/1031)). Thanks @AndydeCleyre
+
+# 4.3.0 (2019-11-25)
+
+Features:
+- Add Python 3.8 support
+([#956](https://github.com/jazzband/pip-tools/pull/956)). Thanks @hramezani
+- Unpin commented out unsafe packages in `requirements.txt`
+([#975](https://github.com/jazzband/pip-tools/pull/975)). Thanks @atugushev
+
+Bug Fixes:
+- Fix `pip-compile` not copying `--trusted-host` from `requirements.in` to `requirements.txt`
+([#964](https://github.com/jazzband/pip-tools/pull/964)). Thanks @atugushev
+- Add compatibility with `pip>=20.0`
+([#953](https://github.com/jazzband/pip-tools/pull/953) and [#978](https://github.com/jazzband/pip-tools/pull/978)). Thanks @atugushev
+- Fix a bug where the resolver wouldn't clean up the ephemeral wheel cache
+([#968](https://github.com/jazzband/pip-tools/pull/968)). Thanks @atugushev
+
+Improved Documentation:
+- Add a note to the `README` about an existing `requirements.txt` file possibly interfering when compiling from scratch
+([#959](https://github.com/jazzband/pip-tools/pull/959)). Thanks @hramezani
+
+# 4.2.0 (2019-10-12)
+
+Features:
+- Add `--ask` option to `pip-sync`
+([#913](https://github.com/jazzband/pip-tools/pull/913)). Thanks @georgek
+
+Bug Fixes:
+- Add compatibility with `pip>=19.3`
+([#864](https://github.com/jazzband/pip-tools/pull/864), [#904](https://github.com/jazzband/pip-tools/pull/904), [#910](https://github.com/jazzband/pip-tools/pull/910), [#912](https://github.com/jazzband/pip-tools/pull/912) and [#915](https://github.com/jazzband/pip-tools/pull/915)). Thanks @atugushev
+- Ensure `pip-compile --no-header <blank requirements.in>` creates/overwrites `requirements.txt`
+([#909](https://github.com/jazzband/pip-tools/pull/909)). Thanks @AndydeCleyre
+- Fix `pip-compile --upgrade-package` removing «via» annotations
+([#931](https://github.com/jazzband/pip-tools/pull/931)). Thanks @hramezani
+
+Improved Documentation:
+- Add info to `README` about layered requirements files and `-c` flag
+([#905](https://github.com/jazzband/pip-tools/pull/905)). Thanks @jamescooke
+
+# 4.1.0 (2019-08-26)
+
+Features:
+- Add `--no-emit-find-links` option to `pip-compile`
+([#873](https://github.com/jazzband/pip-tools/pull/873)). Thanks @jacobtolar
+
+Bug Fixes:
+- Prevent `--dry-run` log message from being printed with `--quiet` option in `pip-compile`
+([#861](https://github.com/jazzband/pip-tools/pull/861)). Thanks @ddormer
+- Fix resolution of requirements from Git URLs without `-e`
+([#879](https://github.com/jazzband/pip-tools/pull/879)). Thanks @andersk
+
+# 4.0.0 (2019-07-25)
+
+Backwards Incompatible Changes:
+- Drop support for EOL Python 3.4
+([#803](https://github.com/jazzband/pip-tools/pull/803)). Thanks @auvipy
+
+Bug Fixes:
+- Fix `pip>=19.2` compatibility
+([#857](https://github.com/jazzband/pip-tools/pull/857)). Thanks @atugushev
+
+# 3.9.0 (2019-07-17)
+
+Features:
+- Print provenance information when `pip-compile` fails
+([#837](https://github.com/jazzband/pip-tools/pull/837)). Thanks @jakevdp
+
+Bug Fixes:
+- Output all logging to stderr instead of stdout
+([#834](https://github.com/jazzband/pip-tools/pull/834)). Thanks @georgek
+- Fix output file update with `--dry-run` option in `pip-compile`
+([#842](https://github.com/jazzband/pip-tools/pull/842)). Thanks @shipmints and @atugushev
+
+# 3.8.0 (2019-06-06)
+
+Features:
+- Options `--upgrade` and `--upgrade-package` are no longer mutually exclusive
+([#831](https://github.com/jazzband/pip-tools/pull/831)). Thanks @adamchainz
+
+Bug Fixes:
+- Fix `--generate-hashes` with bare VCS URLs
+([#812](https://github.com/jazzband/pip-tools/pull/812)). Thanks @jcushman
+- Fix issues with `UnicodeError` when installing `pip-tools` from source in some systems
+([#816](https://github.com/jazzband/pip-tools/pull/816)). Thanks @AbdealiJK
+- Respect `--pre` option in the input file
+([#822](https://github.com/jazzband/pip-tools/pull/822)). Thanks @atugushev
+- Option `--upgrade-package` now works even if the output file does not exist
+([#831](https://github.com/jazzband/pip-tools/pull/831)). Thanks @adamchainz
+
+
+# 3.7.0 (2019-05-09)
+
+Features:
+- Show a progress bar when generating hashes in `pip-compile` verbose mode
+([#743](https://github.com/jazzband/pip-tools/pull/743)). Thanks @atugushev
+- Add options `--cert` and `--client-cert` to `pip-sync`
+([#798](https://github.com/jazzband/pip-tools/pull/798)). Thanks @atugushev
+- Add support for `--find-links` in `pip-compile` output
+([#793](https://github.com/jazzband/pip-tools/pull/793)). Thanks @estan and @atugushev
+- Normalize «command to run» in `pip-compile` headers
+([#800](https://github.com/jazzband/pip-tools/pull/800)). Thanks @atugushev
+- Support URLs as packages
+([#807](https://github.com/jazzband/pip-tools/pull/807)). Thanks @jcushman, @nim65s and @toejough
+
+Bug Fixes:
+- Fix replacing passwords with asterisks in `pip-compile`
+([#808](https://github.com/jazzband/pip-tools/pull/808)). Thanks @atugushev
+
+# 3.6.1 (2019-04-24)
+
+Bug Fixes:
+- Fix `pip>=19.1` compatibility
+([#795](https://github.com/jazzband/pip-tools/pull/795)). Thanks @atugushev
+
+# 3.6.0 (2019-04-03)
+
+Features:
+- Show less output on `pip-sync` with `--quiet` option
+([#765](https://github.com/jazzband/pip-tools/pull/765)). Thanks @atugushev
+- Support the flag `--trusted-host` in `pip-sync`
+([#777](https://github.com/jazzband/pip-tools/pull/777)). Thanks @firebirdberlin
+
+# 3.5.0 (2019-03-13)
+
+Features:
+- Show default index url provided by `pip`
+([#735](https://github.com/jazzband/pip-tools/pull/735)). Thanks @atugushev
+- Add an option to allow enabling/disabling build isolation
+([#758](https://github.com/jazzband/pip-tools/pull/758)). Thanks @atugushev
+
+Bug Fixes:
+- Fix the output file for `pip-compile` with an explicit `setup.py` as source file
+([#731](https://github.com/jazzband/pip-tools/pull/731)). Thanks @atugushev
+- Fix order issue with generated lock file when `hashes` and `markers` are used together
+([#763](https://github.com/jazzband/pip-tools/pull/763)). Thanks @milind-shakya-sp
+
+# 3.4.0 (2019-02-19)
+
+Features:
+- Add option `--quiet` to `pip-compile`
+([#720](https://github.com/jazzband/pip-tools/pull/720)). Thanks @bendikro
+- Emit the original command to the `pip-compile`'s header
+([#733](https://github.com/jazzband/pip-tools/pull/733)). Thanks @atugushev
+
+Bug Fixes:
+- Fix `pip-sync` to use the `pip` script that matches the running Python version
+([#737](https://github.com/jazzband/pip-tools/pull/737)). Thanks @atugushev
+
+# 3.3.2 (2019-01-26)
+
+Bug Fixes:
+- Fix `pip-sync` with a temporary requirement file on Windows
+([#723](https://github.com/jazzband/pip-tools/pull/723)). Thanks @atugushev
+- Fix `pip-sync` to prevent uninstall of stdlib and dev packages
+([#718](https://github.com/jazzband/pip-tools/pull/718)). Thanks @atugushev
+
+# 3.3.1 (2019-01-24)
+
+- Re-release of 3.3.0 after fixing the deployment pipeline
+([#716](https://github.com/jazzband/pip-tools/issues/716)). Thanks @atugushev
+
+# 3.3.0 (2019-01-23)
+(Unreleased - Deployment pipeline issue, see 3.3.1)
+
+Features:
+- Added support for `pip` 19.0
+([#715](https://github.com/jazzband/pip-tools/pull/715)). Thanks @atugushev
+- Add `--allow-unsafe` to update instructions in the generated `requirements.txt`
+([#708](https://github.com/jazzband/pip-tools/pull/708)). Thanks @richafrank
+
+Bug Fixes:
+- Fix `pip-sync` to check hashes
+([#706](https://github.com/jazzband/pip-tools/pull/706)). Thanks @atugushev
+
+# 3.2.0 (2018-12-18)
+
+Features:
+- Apply version constraints specified with package upgrade option (`-P, --upgrade-package`)
+([#694](https://github.com/jazzband/pip-tools/pull/694)). Thanks @richafrank
+
+# 3.1.0 (2018-10-05)
+
+Features:
+- Added support for `pip` 18.1
+([#689](https://github.com/jazzband/pip-tools/pull/689)). Thanks @vphilippon
+
+# 3.0.0 (2018-09-24)
+
+Major changes:
+- Update `pip-tools` for native `pip` 8, 9, 10 and 18 compatibility, un-vendoring `pip` to use the user-installed `pip`
+([#657](https://github.com/jazzband/pip-tools/pull/657) and [#672](https://github.com/jazzband/pip-tools/pull/672)).
+Thanks to @techalchemy, @suutari, @tysonclugg and @vphilippon for contributing on this.
+
+Features:
+- Removed the dependency on the external library `first`
+([#676](https://github.com/jazzband/pip-tools/pull/676)). Thanks @jdufresne
+
+# 2.0.2 (2018-04-28)
+
+Bug Fixes:
+- Added clearer error reporting when skipping pre-releases
+([#655](https://github.com/jazzband/pip-tools/pull/655)). Thanks @WoLpH
+
+# 2.0.1 (2018-04-15)
+
+Bug Fixes:
+- Added missing package data from vendored pip, such as missing cacert.pem file. Thanks @vphilippon
+
+# 2.0.0 (2018-04-15)
+
+Major changes:
+- Vendored `pip` 9.0.3 to keep compatibility for users with `pip` 10.0.0
+([#644](https://github.com/jazzband/pip-tools/pull/644)). Thanks @vphilippon
+
+Features:
+- Improved the speed of pip-compile --generate-hashes by caching the hashes from an existing output file
+([#641](https://github.com/jazzband/pip-tools/pull/641)). Thanks @justicz
+- Added a `pip-sync --user` option to restrict attention to user-local directory
+([#642](https://github.com/jazzband/pip-tools/pull/642)). Thanks @jbergknoff-10e
+- Removed the hard dependency on setuptools
+([#645](https://github.com/jazzband/pip-tools/pull/645)). Thanks @vphilippon
+
+Bug fixes:
+- The pip environment markers on top-level requirements in the source file (requirements.in)
+are now properly handled and will only be processed in the right environment
+([#647](https://github.com/jazzband/pip-tools/pull/647)). Thanks @JoergRittinger
+
+# 1.11.0 (2017-11-30)
+
+Features:
+- Allow editable packages in requirements.in with `pip-compile --generate-hashes` ([#524](https://github.com/jazzband/pip-tools/pull/524)). Thanks @jdufresne
+- Allow for CA bundles with `pip-compile --cert` ([#612](https://github.com/jazzband/pip-tools/pull/612)). Thanks @khwilson
+- Improved `pip-compile` duration with large locally available editable requirement by skipping a copy to the cache
+([#583](https://github.com/jazzband/pip-tools/pull/583)). Thanks @costypetrisor
+- Slightly improved the `NoCandidateFound` error message on potential causes ([#614](https://github.com/jazzband/pip-tools/pull/614)). Thanks @vphilippon
+
+Bug Fixes:
+- Add `-markerlib` to the list of `PACKAGES_TO_IGNORE` of `pip-sync` ([#613](https://github.com/jazzband/pip-tools/pull/613)).
+
+# 1.10.2 (2017-11-22)
+
+Bug Fixes:
+- Fixed bug causing dependencies from invalid wheels for the current platform to be included ([#571](https://github.com/jazzband/pip-tools/pull/571)).
+- `pip-sync` will respect environment markers in the requirements.txt ([600](https://github.com/jazzband/pip-tools/pull/600)). Thanks @hazmat345
+- Converted the ReadMe to have a nice description rendering on PyPI. Thanks @bittner
+
+# 1.10.1 (2017-09-27)
+
+Bug Fixes:
+- Fixed bug breaking `pip-sync` on Python 3, raising `TypeError: '<' not supported between instances of 'InstallRequirement' and 'InstallRequirement'` ([#570](https://github.com/jazzband/pip-tools/pull/570)).
+
+# 1.10.0 (2017-09-27)
+
+Features:
+- `--generate-hashes` now generates hashes for all wheels,
+not only wheels for the currently running platform ([#520](https://github.com/jazzband/pip-tools/pull/520)). Thanks @jdufresne
+- Added a `-q`/`--quiet` argument to the pip-sync command to reduce log output.
+
+Bug Fixes:
+- Fixed bug where unsafe packages would get pinned in generated requirements files
+when `--allow-unsafe` was not set. ([#517](https://github.com/jazzband/pip-tools/pull/517)). Thanks @dschaller
+- Fixed bug where editable PyPI dependencies would have a `download_dir` and be exposed to `git-checkout-index`
+(thus losing their VCS directory), causing `python setup.py egg_info` to fail. ([#385](https://github.com/jazzband/pip-tools/pull/385) and [#538](https://github.com/jazzband/pip-tools/pull/538)). Thanks @blueyed and @dfee
+- Fixed bug where some primary dependencies were annotated with "via" info comments. ([#542](https://github.com/jazzband/pip-tools/pull/542)). Thanks @quantus
+- Fixed bug where pkg-resources would be removed by pip-sync in Ubuntu. ([#555](https://github.com/jazzband/pip-tools/pull/555)). Thanks @cemsbr
+- Fixed bug where the resolver would sometime not stabilize on requirements specifying extras. ([#566](https://github.com/jazzband/pip-tools/pull/566)). Thanks @vphilippon
+- Fixed a Unicode encoding error when a distribution package contains non-ASCII file names ([#567](https://github.com/jazzband/pip-tools/pull/567)). Thanks @suutari
+- Fixed package hashing doing unnecessary unpacking ([#557](https://github.com/jazzband/pip-tools/pull/557)). Thanks @suutari-ai
+
+# 1.9.0 (2017-04-12)
+
+Features:
+- Added ability to read requirements from `setup.py` instead of just `requirements.in` ([#418](https://github.com/jazzband/pip-tools/pull/418)). Thanks to @tysonclugg and @majuscule.
+- Added a `--max-rounds` argument to the pip-compile command to allow for solving large requirement sets ([#472](https://github.com/jazzband/pip-tools/pull/472)). Thanks @derek-miller.
+- Exclude unsafe packages' dependencies when `--allow-unsafe` is not in use ([#441](https://github.com/jazzband/pip-tools/pull/441)). Thanks @jdufresne.
+- Exclude irrelevant pip constraints ([#471](https://github.com/jazzband/pip-tools/pull/471)). Thanks @derek-miller.
+- Allow control over emitting trusted-host to the compiled requirements. ([#448](https://github.com/jazzband/pip-tools/pull/448)). Thanks @tonyseek.
+- Allow running as a Python module ([#461](https://github.com/jazzband/pip-tools/pull/461)). Thanks @AndreLouisCaron.
+- Preserve environment markers in generated requirements.txt. ([#460](https://github.com/jazzband/pip-tools/pull/460)). Thanks @barrywhart.
+
+Bug Fixes:
+- Fixed the --upgrade-package option to respect the given package list to update ([#491](https://github.com/jazzband/pip-tools/pull/491)).
+- Fixed the default output file name when the source file has no extension ([#488](https://github.com/jazzband/pip-tools/pull/488)). Thanks @vphilippon
+- Fixed crash on editable requirements introduced in 1.8.2.
+- Fixed duplicated --trusted-host, --extra-index-url and --index-url in the generated requirements.
+
+# 1.8.2 (2017-03-28)
+
+- Regression fix: editable reqs were losing their dependencies after the first round ([#476](https://github.com/jazzband/pip-tools/pull/476))
+ Thanks @mattlong
+- Remove duplicate index urls in generated requirements.txt ([#468](https://github.com/jazzband/pip-tools/pull/468))
+ Thanks @majuscule
+
+# 1.8.1 (2017-03-22)
+
+- Recalculate secondary dependencies between rounds (#378)
+- Calculated dependencies could be left with wrong candidates when
+ toplevel requirements happen to be also pinned in sub-dependencies (#450)
+- Fix duplicate entries that could happen in generated requirements.txt (#427)
+- Gracefully report invalid pip version (#457)
+- Fix capitalization in the generated requirements.txt, packages will always be lowercased (#452)
+
+# 1.8.0 (2016-11-17)
+
+- Adds support for upgrading individual packages with a new option
+ `--upgrade-package`. To upgrade a _specific_ package to the latest or
+ a specific version use `--upgrade-package <pkg>`. To upgrade all packages,
+ you can still use `pip-compile --upgrade`. (#409)
+- Adds support for pinning dependencies even further by including the hashes
+ found on PyPI at compilation time, which will be re-checked when dependencies
+ are installed at installation time. This adds protection against packages
+ that are tampered with. (#383)
+- Improve support for extras, like `hypothesis[django]`
+- Drop support for pip < 8
+
+
+# 1.7.1 (2016-10-20)
+
+- Add `--allow-unsafe` option (#377)
+
+
+# 1.7.0 (2016-07-06)
+
+- Add compatibility with pip >= 8.1.2 (#374)
+ Thanks so much, @jmbowman!
+
+
+# 1.6.5 (2016-05-11)
+
+- Add warning that pip >= 8.1.2 is not supported until 1.7.x is out
+
+
+# 1.6.4 (2016-05-03)
+
+- Incorporate fix for atomic file saving behaviour on the Windows platform
+ (see #351)
+
+
+# 1.6.3 (2016-05-02)
+
+- PyPI won't let me upload 1.6.2
+
+
+# 1.6.2 (2016-05-02)
+
+- Respect pip configuration from pip.{ini,conf}
+- Fixes for atomic-saving of output files on Windows (see #351)
+
+
+# 1.6.1 (2016-04-06)
+
+Minor changes:
+- pip-sync now supports being invoked from within and outside an activated
+ virtualenv (see #317)
+- pip-compile: support -U as a shorthand for --upgrade
+- pip-compile: support pip's --no-binary and --only-binary flags
+
+Fixes:
+- Change header format of output files to mention all input files
+
+
+# 1.6 (2016-02-05)
+
+Major change:
+- pip-compile will by default try to fulfill package specs by looking at
+ a previously compiled output file first, before checking PyPI. This means
+ pip-compile will only update the requirements.txt when it absolutely has to.
+ To get the old behaviour (picking the latest version of all packages from
+ PyPI), use the new `--upgrade` option.
+
+Minor changes:
+- Bugfix where pip-compile would lose "via" info when on pip 8 (see #313)
+- Ensure cache dir exists (see #315)
+
+
+# 1.5 (2016-01-23)
+
+- Add support for pip >= 8
+- Drop support for pip < 7
+- Fix bug where `pip-sync` fails to uninstall packages if you're using the
+ `--no-index` (or other) flags
+
+
+# 1.4.5 (2016-01-20)
+
+- Add `--no-index` flag to `pip-compile` to avoid emitting `--index-url` into
+ the output (useful if you have configured a different index in your global
+ ~/.pip/pip.conf, for example)
+- Fix: ignore stdlib backport packages, like `argparse`, when listing which
+ packages will be installed/uninstalled (#286)
+- Fix pip-sync failed uninstalling packages when using `--find-links` (#298)
+- Explicitly error when pip-tools is used with pip 8.0+ (for now)
+
+
+# 1.4.4 (2016-01-11)
+
+- Fix: unintended change in behaviour where packages installed by `pip-sync`
+ could accidentally get upgraded under certain conditions, even though the
+ requirements.txt would dictate otherwise (see #290)
+
+
+# 1.4.3 (2016-01-06)
+
+- Fix: add `--index-url` and `--extra-index-url` options to `pip-sync`
+- Fix: always install using `--upgrade` flag when running `pip-sync`
+
+
+# 1.4.2 (2015-12-13)
+
+- Fix bug where umask was ignored when writing requirement files (#268)
+
+
+# 1.4.1 (2015-12-13)
+
+- Fix bug where successive invocations of pip-sync with editables kept
+ uninstalling/installing them (fixes #270)
+
+
+# 1.4.0 (2015-12-13)
+
+- Add command line option -f / --find-links
+- Add command line option --no-index
+- Add command line alias -n (for --dry-run)
+- Fix a unicode issue
+
+
+# 1.3.0 (2015-12-08)
+
+- Support multiple requirement files to pip-compile
+- Support requirements from stdin for pip-compile
+- Support --output-file option on pip-compile, to redirect output to a file (or stdout)
+
+
+# 1.2.0 (2015-11-30)
+
+- Add CHANGELOG :)
+- Support pip-sync'ing editable requirements
+- Support extras properly (i.e. package[foo] syntax)
+
+(Anything before 1.2.0 was not recorded.)
diff --git a/third_party/python/pip-tools/CONTRIBUTING.md b/third_party/python/pip-tools/CONTRIBUTING.md
new file mode 100644
index 0000000000..7a3c83bf9a
--- /dev/null
+++ b/third_party/python/pip-tools/CONTRIBUTING.md
@@ -0,0 +1,49 @@
+[![Jazzband](https://jazzband.co/static/img/jazzband.svg)](https://jazzband.co/)
+
+This is a [Jazzband](https://jazzband.co/) project. By contributing you agree
+to abide by the [Contributor Code of Conduct](https://jazzband.co/about/conduct)
+and follow the [guidelines](https://jazzband.co/about/guidelines).
+
+## Project Contribution Guidelines
+
+Here are a few additional or emphasized guidelines to follow when contributing to pip-tools:
+- Install pip-tools in development mode and its test dependencies with `pip install -e .[testing]`.
+- Check with `tox -e checkqa` that your changes do not break the style conventions (see the sketch after this list).
+- Always provide tests for your changes.
+- Give a clear one-line description in the PR (that the maintainers can add to [CHANGELOG](CHANGELOG.md) afterwards).
+- Wait for the review of at least one other contributor before merging (even if you're a Jazzband member).
+- Before merging, assign the PR to a milestone for a version to help with the release process.
+
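+A minimal local workflow sketch following the guidelines above (the virtualenv setup is just one common approach; adapt it to your own tooling):
+
+```bash
+# create and activate a virtual environment for development
+python -m venv .venv && source .venv/bin/activate
+
+# install pip-tools in development mode along with its test dependencies
+pip install -e .[testing]
+
+# run the style/QA checks, then the test suite
+tox -e checkqa
+tox
+```
+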
+The only exception to those guidelines is for trivial changes, such as
+documentation corrections or contributions that do not change pip-tools itself.
+
+Contributions following these guidelines are always welcomed, encouraged and appreciated.
+
+## Project Release Process
+
+Jazzband aims to give full access to all members, including performing releases, as described in the
+[Jazzband Releases documentation](https://jazzband.co/about/releases).
+
+To help keep track of releases and their changes, here's the current release process:
+- Check to see if any recently merged PRs are missing from the milestone of the version about to be released.
+- Create a branch for the release. *Ex: release-3.4.0*.
+- Update the [CHANGELOG](CHANGELOG.md) with the version, the date, and the one-line descriptions
+ from the PRs included in the milestone of the version.
+ Check the previous release entries for the format (a rough sketch also follows this list). Don't forget the "Thanks @contributor" mentions.
+- Push the branch to your fork and create a pull request.
+- Merge the pull request after the changes have been approved.
+- Make sure that the tests/CI still pass.
+- Once ready, go to `Github pip-tools Homepage > releases tab > Draft a new release` and type in:
+ - *Tag version:* The exact version number, following [Semantic Versioning](https://blog.versioneye.com/2014/01/16/semantic-versioning/). *Ex: 3.4.0*
+ - *Target:* master. As a general rule, the HEAD commit of the master branch should be the release target.
+ - *Release title:* Same as the tag. *Ex: 3.4.0*
+ - *Describe this release:* Copy of this release's changelog segment.
+- Publish release. This will push a tag on the HEAD of master, trigger the CI pipeline and
+ deploy a pip-tools release in the **Jazzband private package index** upon success.
+- The pip-tools "lead" project members will receive an email notification to review the release and
+ deploy it to the public PyPI if all is correct.
+- Once the release to the public PyPI is confirmed, close the milestone.
+
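+A rough sketch of the entry format, based on the existing [CHANGELOG](CHANGELOG.md) entries (the version, date, and PR numbers below are placeholders):
+
+```markdown
+# X.Y.Z (YYYY-MM-DD)
+
+Features:
+- One-line description of the feature
+([#NNNN](https://github.com/jazzband/pip-tools/pull/NNNN)). Thanks @contributor
+
+Bug Fixes:
+- One-line description of the fix
+([#NNNN](https://github.com/jazzband/pip-tools/pull/NNNN)). Thanks @contributor
+```
+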
+Please be mindful of others before and while performing a release, and use this access responsibly.
+
+Do not hesitate to ask questions if you have any before performing a release.
diff --git a/third_party/python/pip-tools/LICENSE b/third_party/python/pip-tools/LICENSE
new file mode 100644
index 0000000000..64719ca9f5
--- /dev/null
+++ b/third_party/python/pip-tools/LICENSE
@@ -0,0 +1,26 @@
+Copyright (c). All rights reserved.
+
+Redistribution and use in source and binary forms, with or without modification,
+are permitted provided that the following conditions are met:
+
+ 1. Redistributions of source code must retain the above copyright notice,
+ this list of conditions and the following disclaimer.
+
+ 2. Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+
+ 3. Neither the name of pip-tools nor the names of its contributors may be
+ used to endorse or promote products derived from this software without
+ specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
+ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/third_party/python/pip-tools/PKG-INFO b/third_party/python/pip-tools/PKG-INFO
new file mode 100644
index 0000000000..ea065f0495
--- /dev/null
+++ b/third_party/python/pip-tools/PKG-INFO
@@ -0,0 +1,495 @@
+Metadata-Version: 2.1
+Name: pip-tools
+Version: 5.3.1
+Summary: pip-tools keeps your pinned dependencies fresh.
+Home-page: https://github.com/jazzband/pip-tools/
+Author: Vincent Driessen
+Author-email: me@nvie.com
+License: BSD
+Description: |jazzband| |pypi| |pyversions| |buildstatus-travis| |buildstatus-appveyor| |codecov|
+
+ ==================================
+ pip-tools = pip-compile + pip-sync
+ ==================================
+
+ A set of command line tools to help you keep your ``pip``-based packages fresh,
+ even when you've pinned them. You do pin them, right? (In building your Python application and its dependencies for production, you want to make sure that your builds are predictable and deterministic.)
+
+ .. image:: https://github.com/jazzband/pip-tools/raw/master/img/pip-tools-overview.png
+ :alt: pip-tools overview for phase II
+
+ .. |buildstatus-travis| image:: https://img.shields.io/travis/jazzband/pip-tools/master.svg?logo=travis
+ :alt: Travis CI build status
+ :target: https://travis-ci.org/jazzband/pip-tools
+ .. |buildstatus-appveyor| image:: https://img.shields.io/appveyor/ci/jazzband/pip-tools/master.svg?logo=appveyor
+ :alt: AppVeyor build status
+ :target: https://ci.appveyor.com/project/jazzband/pip-tools
+ .. |codecov| image:: https://codecov.io/gh/jazzband/pip-tools/branch/master/graph/badge.svg
+ :alt: Coverage
+ :target: https://codecov.io/gh/jazzband/pip-tools
+ .. |jazzband| image:: https://jazzband.co/static/img/badge.svg
+ :alt: Jazzband
+ :target: https://jazzband.co/
+ .. |pypi| image:: https://img.shields.io/pypi/v/pip-tools.svg
+ :alt: PyPI version
+ :target: https://pypi.org/project/pip-tools/
+ .. |pyversions| image:: https://img.shields.io/pypi/pyversions/pip-tools.svg
+ :alt: Supported Python versions
+ :target: https://pypi.org/project/pip-tools/
+ .. _You do pin them, right?: http://nvie.com/posts/pin-your-packages/
+
+
+ Installation
+ ============
+
+ Similar to ``pip``, ``pip-tools`` must be installed in each of your project's
+ `virtual environments`_:
+
+ .. code-block:: bash
+
+ $ source /path/to/venv/bin/activate
+ (venv)$ python -m pip install pip-tools
+
+ **Note**: all of the remaining example commands assume you've activated your
+ project's virtual environment.
+
+ .. _virtual environments: https://packaging.python.org/tutorials/installing-packages/#creating-virtual-environments
+
+ Example usage for ``pip-compile``
+ =================================
+
+ The ``pip-compile`` command lets you compile a ``requirements.txt`` file from
+ your dependencies, specified in either ``setup.py`` or ``requirements.in``.
+
+ Run it with ``pip-compile`` or ``python -m piptools compile``. If you use
+ multiple Python versions, you can run ``pip-compile`` as ``py -X.Y -m piptools
+ compile`` on Windows and ``pythonX.Y -m piptools compile`` on other systems.
+
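+ For instance, on a machine with several interpreters installed, the module form
+ might be invoked as follows (the ``3.8`` below is purely illustrative; use the
+ version your project targets):
+
+ .. code-block:: bash
+
+ $ python -m piptools compile # current interpreter
+ $ python3.8 -m piptools compile # a specific interpreter (Linux/macOS)
+ $ py -3.8 -m piptools compile # a specific interpreter (Windows)
+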
+ ``pip-compile`` should be run from the same virtual environment as your
+ project so conditional dependencies that require a specific Python version,
+ or other environment markers, resolve relative to your project's
+ environment.
+
+ **Note**: ensure you don't have an existing ``requirements.txt`` if you compile
+ ``setup.py`` or ``requirements.in`` from scratch; otherwise, it might
+ interfere.
+
+ Requirements from ``setup.py``
+ ------------------------------
+
+ Suppose you have a Django project, and want to pin it for production.
+ If you have a ``setup.py`` with ``install_requires=['django']``, then run
+ ``pip-compile`` without any arguments:
+
+ .. code-block:: bash
+
+ $ pip-compile
+ #
+ # This file is autogenerated by pip-compile
+ # To update, run:
+ #
+ # pip-compile
+ #
+ asgiref==3.2.3 # via django
+ django==3.0.3 # via my_django_project (setup.py)
+ pytz==2019.3 # via django
+ sqlparse==0.3.0 # via django
+
+ ``pip-compile`` will produce your ``requirements.txt``, with all the Django
+ dependencies (and all underlying dependencies) pinned.
+
+ Without ``setup.py``
+ --------------------
+
+ If you don't use ``setup.py`` (`it's easy to write one`_), you can create a
+ ``requirements.in`` file to declare the Django dependency:
+
+ .. code-block:: ini
+
+ # requirements.in
+ django
+
+ Now, run ``pip-compile requirements.in``:
+
+ .. code-block:: bash
+
+ $ pip-compile requirements.in
+ #
+ # This file is autogenerated by pip-compile
+ # To update, run:
+ #
+ # pip-compile requirements.in
+ #
+ asgiref==3.2.3 # via django
+ django==3.0.3 # via -r requirements.in
+ pytz==2019.3 # via django
+ sqlparse==0.3.0 # via django
+
+ And it will produce your ``requirements.txt``, with all the Django dependencies
+ (and all underlying dependencies) pinned.
+
+ .. _it's easy to write one: https://packaging.python.org/guides/distributing-packages-using-setuptools/#configuring-your-project
+
+ Using hashes
+ ------------
+
+ If you would like to use *Hash-Checking Mode* available in ``pip`` since
+ version 8.0, ``pip-compile`` offers ``--generate-hashes`` flag:
+
+ .. code-block:: bash
+
+ $ pip-compile --generate-hashes requirements.in
+ #
+ # This file is autogenerated by pip-compile
+ # To update, run:
+ #
+ # pip-compile --generate-hashes requirements.in
+ #
+ asgiref==3.2.3 \
+ --hash=sha256:7e06d934a7718bf3975acbf87780ba678957b87c7adc056f13b6215d610695a0 \
+ --hash=sha256:ea448f92fc35a0ef4b1508f53a04c4670255a3f33d22a81c8fc9c872036adbe5 \
+ # via django
+ django==3.0.3 \
+ --hash=sha256:2f1ba1db8648484dd5c238fb62504777b7ad090c81c5f1fd8d5eb5ec21b5f283 \
+ --hash=sha256:c91c91a7ad6ef67a874a4f76f58ba534f9208412692a840e1d125eb5c279cb0a \
+ # via -r requirements.in
+ pytz==2019.3 \
+ --hash=sha256:1c557d7d0e871de1f5ccd5833f60fb2550652da6be2693c1e02300743d21500d \
+ --hash=sha256:b02c06db6cf09c12dd25137e563b31700d3b80fcc4ad23abb7a315f2789819be \
+ # via django
+ sqlparse==0.3.0 \
+ --hash=sha256:40afe6b8d4b1117e7dff5504d7a8ce07d9a1b15aeeade8a2d10f130a834f8177 \
+ --hash=sha256:7c3dca29c022744e95b547e867cee89f4fce4373f3549ccd8797d8eb52cdb873 \
+ # via django
+
+ Updating requirements
+ ---------------------
+
+ To update all packages, periodically re-run ``pip-compile --upgrade``.
+
+ To update a specific package to the latest or a specific version use the
+ ``--upgrade-package`` or ``-P`` flag:
+
+ .. code-block:: bash
+
+ # only update the django package
+ $ pip-compile --upgrade-package django
+
+ # update both the django and requests packages
+ $ pip-compile --upgrade-package django --upgrade-package requests
+
+ # update the django package to the latest, and requests to v2.0.0
+ $ pip-compile --upgrade-package django --upgrade-package requests==2.0.0
+
+ You can combine ``--upgrade`` and ``--upgrade-package`` in one command, to
+ provide constraints on the allowed upgrades. For example to upgrade all
+ packages whilst constraining requests to the latest version less than 3.0:
+
+ .. code-block:: bash
+
+ $ pip-compile --upgrade --upgrade-package 'requests<3.0'
+
+ Output File
+ -----------
+
+ To output the pinned requirements in a filename other than
+ ``requirements.txt``, use ``--output-file``. This might be useful for compiling
+ multiple files, for example with different constraints on django to test a
+ library with both versions using `tox <https://tox.readthedocs.io/en/latest/>`__:
+
+ .. code-block:: bash
+
+ $ pip-compile --upgrade-package 'django<1.0' --output-file requirements-django0x.txt
+ $ pip-compile --upgrade-package 'django<2.0' --output-file requirements-django1x.txt
+
+ Or to output to standard output, use ``--output-file=-``:
+
+ .. code-block:: bash
+
+ $ pip-compile --output-file=- > requirements.txt
+ $ pip-compile - --output-file=- < requirements.in > requirements.txt
+
+ Forwarding options to ``pip``
+ -----------------------------
+
+ Any valid ``pip`` flags or arguments may be passed on with ``pip-compile``'s
+ ``--pip-args`` option, e.g.
+
+ .. code-block:: bash
+
+ $ pip-compile requirements.in --pip-args '--retries 10 --timeout 30'
+
+ Configuration
+ -------------
+
+ You might be wrapping the ``pip-compile`` command in another script. To avoid
+ confusing consumers of your custom script you can override the update command
+ generated at the top of requirements files by setting the
+ ``CUSTOM_COMPILE_COMMAND`` environment variable.
+
+ .. code-block:: bash
+
+ $ CUSTOM_COMPILE_COMMAND="./pipcompilewrapper" pip-compile requirements.in
+ #
+ # This file is autogenerated by pip-compile
+ # To update, run:
+ #
+ # ./pipcompilewrapper
+ #
+ asgiref==3.2.3 # via django
+ django==3.0.3 # via -r requirements.in
+ pytz==2019.3 # via django
+ sqlparse==0.3.0 # via django
+
+ Workflow for layered requirements
+ ---------------------------------
+
+ If you have different environments that you need to install different but
+ compatible packages for, then you can create layered requirements files and use
+ one layer to constrain the other.
+
+ For example, if you have a Django project where you want the newest ``2.1``
+ release in production and when developing you want to use the Django debug
+ toolbar, then you can create two ``*.in`` files, one for each layer:
+
+ .. code-block:: ini
+
+ # requirements.in
+ django<2.2
+
+ At the top of the development requirements ``dev-requirements.in`` you use ``-c
+ requirements.txt`` to constrain the dev requirements to packages already
+ selected for production in ``requirements.txt``.
+
+ .. code-block:: ini
+
+ # dev-requirements.in
+ -c requirements.txt
+ django-debug-toolbar
+
+ First, compile ``requirements.txt`` as usual:
+
+ .. code-block:: bash
+
+ $ pip-compile
+ #
+ # This file is autogenerated by pip-compile
+ # To update, run:
+ #
+ # pip-compile
+ #
+ django==2.1.15 # via -r requirements.in
+ pytz==2019.3 # via django
+
+
+ Now compile the dev requirements and the ``requirements.txt`` file is used as
+ a constraint:
+
+ .. code-block:: bash
+
+ $ pip-compile dev-requirements.in
+ #
+ # This file is autogenerated by pip-compile
+ # To update, run:
+ #
+ # pip-compile dev-requirements.in
+ #
+ django-debug-toolbar==2.2 # via -r dev-requirements.in
+ django==2.1.15 # via -c requirements.txt, django-debug-toolbar
+ pytz==2019.3 # via -c requirements.txt, django
+ sqlparse==0.3.0 # via django-debug-toolbar
+
+ As you can see above, even though a ``2.2`` release of Django is available, the
+ dev requirements only include a ``2.1`` version of Django because they were
+ constrained. Now both compiled requirements files can be installed safely in
+ the dev environment.
+
+ To install requirements in the production stage, use:
+
+ .. code-block:: bash
+
+ $ pip-sync
+
+ To install requirements in the development stage, use:
+
+ .. code-block:: bash
+
+ $ pip-sync requirements.txt dev-requirements.txt
+
+
+ Version control integration
+ ---------------------------
+
+ You can use ``pip-compile`` as a hook for `pre-commit <https://github.com/pre-commit/pre-commit>`_.
+ See the `pre-commit docs <https://pre-commit.com/>`_ for instructions.
+ Sample ``.pre-commit-config.yaml``:
+
+ .. code-block:: yaml
+
+ repos:
+ - repo: https://github.com/jazzband/pip-tools
+ rev: 5.0.0
+ hooks:
+ - id: pip-compile
+
+ You might want to customize ``pip-compile`` args by configuring ``args`` and/or ``files``, for example:
+
+ .. code-block:: yaml
+
+ repos:
+ - repo: https://github.com/jazzband/pip-tools
+ rev: 5.0.0
+ hooks:
+ - id: pip-compile
+ files: ^requirements/production\.(in|txt)$
+ args: [--index-url=https://example.com, requirements/production.in]
+
+
+ Example usage for ``pip-sync``
+ ==============================
+
+ Now that you have a ``requirements.txt``, you can use ``pip-sync`` to update
+ your virtual environment to reflect exactly what's in there. This will
+ install/upgrade/uninstall everything necessary to match the
+ ``requirements.txt`` contents.
+
+ Run it with ``pip-sync`` or ``python -m piptools sync``. If you use multiple
+ Python versions, you can also run ``py -X.Y -m piptools sync`` on Windows and
+ ``pythonX.Y -m piptools sync`` on other systems.
+
+ ``pip-sync`` must be installed into and run from the same virtual
+ environment as your project to identify which packages to install
+ or upgrade.
+
+ **Be careful**: ``pip-sync`` is meant to be used only with a
+ ``requirements.txt`` generated by ``pip-compile``.
+
+ .. code-block:: bash
+
+ $ pip-sync
+ Uninstalling flake8-2.4.1:
+ Successfully uninstalled flake8-2.4.1
+ Collecting click==4.1
+ Downloading click-4.1-py2.py3-none-any.whl (62kB)
+ 100% |................................| 65kB 1.8MB/s
+ Found existing installation: click 4.0
+ Uninstalling click-4.0:
+ Successfully uninstalled click-4.0
+ Successfully installed click-4.1
+
+ To sync multiple ``*.txt`` dependency lists, just pass them in via command
+ line arguments, e.g.
+
+ .. code-block:: bash
+
+ $ pip-sync dev-requirements.txt requirements.txt
+
+ If no files are passed in, ``pip-sync`` defaults to ``requirements.txt``.
+
+ Any valid ``pip install`` flags or arguments may be passed with ``pip-sync``'s
+ ``--pip-args`` option, e.g.
+
+ .. code-block:: bash
+
+ $ pip-sync requirements.txt --pip-args '--no-cache-dir --no-deps'
+
+ If you use multiple Python versions, you can run ``pip-sync`` as
+ ``py -X.Y -m piptools sync ...`` on Windows and
+ ``pythonX.Y -m piptools sync ...`` on other systems.
+
+ **Note**: ``pip-sync`` will not upgrade or uninstall packaging tools like
+ ``setuptools``, ``pip``, or ``pip-tools`` itself. Use ``python -m pip install --upgrade``
+ to upgrade those packages.
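+
+ For example, to bring those tools up to date yourself (a minimal sketch using the
+ command mentioned above):
+
+ .. code-block:: bash
+
+     $ python -m pip install --upgrade pip setuptools pip-tools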
+
+ Should I commit ``requirements.in`` and ``requirements.txt`` to source control?
+ ===============================================================================
+
+ Generally, yes. If you want a reproducible environment installation available from your source control,
+ then yes, you should commit both ``requirements.in`` and ``requirements.txt`` to source control.
+
+ Note that if you are deploying on multiple Python environments (read the section below),
+ then you must commit a separate output file for each Python environment.
+ We suggest using the ``{env}-requirements.txt`` format
+ (e.g. ``win32-py2.7-requirements.txt``, ``macos-py3.6-requirements.txt``, etc.).
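+
+ For example, using the ``--output-file`` option described earlier, each environment
+ can compile its own pinned file following that naming convention (a sketch; the
+ file name below is just the suggested format):
+
+ .. code-block:: bash
+
+     $ pip-compile --output-file=macos-py3.6-requirements.txt requirements.in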
+
+
+ Cross-environment usage of ``requirements.in``/``requirements.txt`` and ``pip-compile``
+ =======================================================================================
+
+ The dependencies of a package can change depending on the Python environment in which it
+ is installed. Here, we define a Python environment as the combination of Operating
+ System, Python version (2.7, 3.6, etc.), and Python implementation (CPython, PyPy,
+ etc.). For an exact definition, refer to the possible combinations of `PEP 508
+ environment markers`_.
+
+ As the resulting ``requirements.txt`` can differ for each environment, users must
+ execute ``pip-compile`` **on each Python environment separately** to generate a
+ ``requirements.txt`` valid for that environment. The same ``requirements.in`` can
+ be used as the source file for all environments, using `PEP 508 environment markers`_ as
+ needed, just as you would for regular cross-environment ``pip`` usage.
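+
+ For example, a ``requirements.in`` entry can be guarded by an environment marker and
+ then compiled once per target environment. The sketch below uses ``colorama`` purely
+ as a hypothetical Windows-only requirement:
+
+ .. code-block:: bash
+
+     $ cat requirements.in
+     colorama ; sys_platform == "win32"  # hypothetical marker-guarded requirement
+     $ pip-compile requirements.in  # repeat on each target Python environment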
+
+ If the generated ``requirements.txt`` remains exactly the same for all Python
+ environments, then it can be used across Python environments safely. **But** users
+ should be careful, as any package update can introduce environment-dependent
+ dependencies, making any newly generated ``requirements.txt`` environment-dependent too.
+ As a general rule, users should still run ``pip-compile`` on each targeted Python
+ environment to avoid issues.
+
+ .. _PEP 508 environment markers: https://www.python.org/dev/peps/pep-0508/#environment-markers
+
+ Other useful tools
+ ==================
+
+ - `pipdeptree`_ to print the dependency tree of the installed packages.
+ - ``requirements.in``/``requirements.txt`` syntax highlighting:
+
+ * `requirements.txt.vim`_ for Vim.
+ * `Python extension for VS Code`_ for VS Code.
+
+ .. _pipdeptree: https://github.com/naiquevin/pipdeptree
+ .. _requirements.txt.vim: https://github.com/raimon49/requirements.txt.vim
+ .. _Python extension for VS Code: https://marketplace.visualstudio.com/items?itemName=ms-python.python
+
+
+ Deprecations
+ ============
+
+ This section lists ``pip-tools`` features that are currently deprecated.
+
+ - ``--index/--no-index`` command-line options: use
+   ``--emit-index-url/--no-emit-index-url`` instead (since 5.2.0).
+
+ Versions and compatibility
+ ==========================
+
+ The table below summarizes the latest ``pip-tools`` versions and the ``pip``
+ versions they require.
+
+ +-----------+-----------------+
+ | pip-tools | pip |
+ +===========+=================+
+ | 4.5.x | 8.1.3 - 20.0.x |
+ +-----------+-----------------+
+ | 5.x | 20.0.x - 20.1.x |
+ +-----------+-----------------+
+
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: Intended Audience :: System Administrators
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Topic :: System :: Systems Administration
+Requires-Python: !=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7
+Provides-Extra: testing
+Provides-Extra: coverage
diff --git a/third_party/python/pip-tools/README.rst b/third_party/python/pip-tools/README.rst
new file mode 100644
index 0000000000..8f045a989a
--- /dev/null
+++ b/third_party/python/pip-tools/README.rst
@@ -0,0 +1,466 @@
+|jazzband| |pypi| |pyversions| |buildstatus-travis| |buildstatus-appveyor| |codecov|
+
+==================================
+pip-tools = pip-compile + pip-sync
+==================================
+
+A set of command line tools to help you keep your ``pip``-based packages fresh,
+even when you've pinned them. `You do pin them, right?`_ (In building your Python application and its dependencies for production, you want to make sure that your builds are predictable and deterministic.)
+
+.. image:: https://github.com/jazzband/pip-tools/raw/master/img/pip-tools-overview.png
+ :alt: pip-tools overview for phase II
+
+.. |buildstatus-travis| image:: https://img.shields.io/travis/jazzband/pip-tools/master.svg?logo=travis
+ :alt: Travis CI build status
+ :target: https://travis-ci.org/jazzband/pip-tools
+.. |buildstatus-appveyor| image:: https://img.shields.io/appveyor/ci/jazzband/pip-tools/master.svg?logo=appveyor
+ :alt: AppVeyor build status
+ :target: https://ci.appveyor.com/project/jazzband/pip-tools
+.. |codecov| image:: https://codecov.io/gh/jazzband/pip-tools/branch/master/graph/badge.svg
+ :alt: Coverage
+ :target: https://codecov.io/gh/jazzband/pip-tools
+.. |jazzband| image:: https://jazzband.co/static/img/badge.svg
+ :alt: Jazzband
+ :target: https://jazzband.co/
+.. |pypi| image:: https://img.shields.io/pypi/v/pip-tools.svg
+ :alt: PyPI version
+ :target: https://pypi.org/project/pip-tools/
+.. |pyversions| image:: https://img.shields.io/pypi/pyversions/pip-tools.svg
+ :alt: Supported Python versions
+ :target: https://pypi.org/project/pip-tools/
+.. _You do pin them, right?: http://nvie.com/posts/pin-your-packages/
+
+
+Installation
+============
+
+Similar to ``pip``, ``pip-tools`` must be installed in each of your project's
+`virtual environments`_:
+
+.. code-block:: bash
+
+ $ source /path/to/venv/bin/activate
+ (venv)$ python -m pip install pip-tools
+
+**Note**: all of the remaining example commands assume you've activated your
+project's virtual environment.
+
+.. _virtual environments: https://packaging.python.org/tutorials/installing-packages/#creating-virtual-environments
+
+Example usage for ``pip-compile``
+=================================
+
+The ``pip-compile`` command lets you compile a ``requirements.txt`` file from
+your dependencies, specified in either ``setup.py`` or ``requirements.in``.
+
+Run it with ``pip-compile`` or ``python -m piptools compile``. If you use
+multiple Python versions, you can run ``pip-compile`` as ``py -X.Y -m piptools
+compile`` on Windows and ``pythonX.Y -m piptools compile`` on other systems.
+
+``pip-compile`` should be run from the same virtual environment as your
+project so conditional dependencies that require a specific Python version,
+or other environment markers, resolve relative to your project's
+environment.
+
+**Note**: make sure there is no existing ``requirements.txt`` when you compile
+``setup.py`` or ``requirements.in`` from scratch; otherwise, it might
+interfere.
+
+Requirements from ``setup.py``
+------------------------------
+
+Suppose you have a Django project, and want to pin it for production.
+If you have a ``setup.py`` with ``install_requires=['django']``, then run
+``pip-compile`` without any arguments:
+
+.. code-block:: bash
+
+ $ pip-compile
+ #
+ # This file is autogenerated by pip-compile
+ # To update, run:
+ #
+ # pip-compile
+ #
+ asgiref==3.2.3 # via django
+ django==3.0.3 # via my_django_project (setup.py)
+ pytz==2019.3 # via django
+ sqlparse==0.3.0 # via django
+
+``pip-compile`` will produce your ``requirements.txt``, with all the Django
+dependencies (and all underlying dependencies) pinned.
+
+Without ``setup.py``
+--------------------
+
+If you don't use ``setup.py`` (`it's easy to write one`_), you can create a
+``requirements.in`` file to declare the Django dependency:
+
+.. code-block:: ini
+
+ # requirements.in
+ django
+
+Now, run ``pip-compile requirements.in``:
+
+.. code-block:: bash
+
+ $ pip-compile requirements.in
+ #
+ # This file is autogenerated by pip-compile
+ # To update, run:
+ #
+ # pip-compile requirements.in
+ #
+ asgiref==3.2.3 # via django
+ django==3.0.3 # via -r requirements.in
+ pytz==2019.3 # via django
+ sqlparse==0.3.0 # via django
+
+And it will produce your ``requirements.txt``, with all the Django dependencies
+(and all underlying dependencies) pinned.
+
+.. _it's easy to write one: https://packaging.python.org/guides/distributing-packages-using-setuptools/#configuring-your-project
+
+Using hashes
+------------
+
+If you would like to use *Hash-Checking Mode*, available in ``pip`` since
+version 8.0, ``pip-compile`` offers the ``--generate-hashes`` flag:
+
+.. code-block:: bash
+
+ $ pip-compile --generate-hashes requirements.in
+ #
+ # This file is autogenerated by pip-compile
+ # To update, run:
+ #
+ # pip-compile --generate-hashes requirements.in
+ #
+ asgiref==3.2.3 \
+ --hash=sha256:7e06d934a7718bf3975acbf87780ba678957b87c7adc056f13b6215d610695a0 \
+ --hash=sha256:ea448f92fc35a0ef4b1508f53a04c4670255a3f33d22a81c8fc9c872036adbe5 \
+ # via django
+ django==3.0.3 \
+ --hash=sha256:2f1ba1db8648484dd5c238fb62504777b7ad090c81c5f1fd8d5eb5ec21b5f283 \
+ --hash=sha256:c91c91a7ad6ef67a874a4f76f58ba534f9208412692a840e1d125eb5c279cb0a \
+ # via -r requirements.in
+ pytz==2019.3 \
+ --hash=sha256:1c557d7d0e871de1f5ccd5833f60fb2550652da6be2693c1e02300743d21500d \
+ --hash=sha256:b02c06db6cf09c12dd25137e563b31700d3b80fcc4ad23abb7a315f2789819be \
+ # via django
+ sqlparse==0.3.0 \
+ --hash=sha256:40afe6b8d4b1117e7dff5504d7a8ce07d9a1b15aeeade8a2d10f130a834f8177 \
+ --hash=sha256:7c3dca29c022744e95b547e867cee89f4fce4373f3549ccd8797d8eb52cdb873 \
+ # via django
+
+Updating requirements
+---------------------
+
+To update all packages, periodically re-run ``pip-compile --upgrade``.
+
+To update a specific package to the latest or a specific version use the
+``--upgrade-package`` or ``-P`` flag:
+
+.. code-block:: bash
+
+ # only update the django package
+ $ pip-compile --upgrade-package django
+
+ # update both the django and requests packages
+ $ pip-compile --upgrade-package django --upgrade-package requests
+
+ # update the django package to the latest, and requests to v2.0.0
+ $ pip-compile --upgrade-package django --upgrade-package requests==2.0.0
+
+You can combine ``--upgrade`` and ``--upgrade-package`` in one command to
+provide constraints on the allowed upgrades. For example, to upgrade all
+packages while constraining ``requests`` to the latest version less than 3.0:
+
+.. code-block:: bash
+
+ $ pip-compile --upgrade --upgrade-package 'requests<3.0'
+
+Output File
+-----------
+
+To output the pinned requirements in a filename other than
+``requirements.txt``, use ``--output-file``. This might be useful for compiling
+multiple files, for example with different constraints on django to test a
+library with both versions using `tox <https://tox.readthedocs.io/en/latest/>`__:
+
+.. code-block:: bash
+
+ $ pip-compile --upgrade-package 'django<1.0' --output-file requirements-django0x.txt
+ $ pip-compile --upgrade-package 'django<2.0' --output-file requirements-django1x.txt
+
+Or to output to standard output, use ``--output-file=-``:
+
+.. code-block:: bash
+
+ $ pip-compile --output-file=- > requirements.txt
+ $ pip-compile - --output-file=- < requirements.in > requirements.txt
+
+Forwarding options to ``pip``
+-----------------------------
+
+Any valid ``pip`` flags or arguments may be passed on with ``pip-compile``'s
+``--pip-args`` option, e.g.
+
+.. code-block:: bash
+
+ $ pip-compile requirements.in --pip-args '--retries 10 --timeout 30'
+
+Configuration
+-------------
+
+You might be wrapping the ``pip-compile`` command in another script. To avoid
+confusing consumers of your custom script you can override the update command
+generated at the top of requirements files by setting the
+``CUSTOM_COMPILE_COMMAND`` environment variable.
+
+.. code-block:: bash
+
+ $ CUSTOM_COMPILE_COMMAND="./pipcompilewrapper" pip-compile requirements.in
+ #
+ # This file is autogenerated by pip-compile
+ # To update, run:
+ #
+ # ./pipcompilewrapper
+ #
+ asgiref==3.2.3 # via django
+ django==3.0.3 # via -r requirements.in
+ pytz==2019.3 # via django
+ sqlparse==0.3.0 # via django
+
+Workflow for layered requirements
+---------------------------------
+
+If you have different environments that you need to install different but
+compatible packages for, then you can create layered requirements files and use
+one layer to constrain the other.
+
+For example, if you have a Django project where you want the newest ``2.1``
+release in production and when developing you want to use the Django debug
+toolbar, then you can create two ``*.in`` files, one for each layer:
+
+.. code-block:: ini
+
+ # requirements.in
+ django<2.2
+
+At the top of the development requirements ``dev-requirements.in`` you use ``-c
+requirements.txt`` to constrain the dev requirements to packages already
+selected for production in ``requirements.txt``.
+
+.. code-block:: ini
+
+ # dev-requirements.in
+ -c requirements.txt
+ django-debug-toolbar
+
+First, compile ``requirements.txt`` as usual:
+
+.. code-block:: bash
+
+ $ pip-compile
+ #
+ # This file is autogenerated by pip-compile
+ # To update, run:
+ #
+ # pip-compile
+ #
+ django==2.1.15 # via -r requirements.in
+ pytz==2019.3 # via django
+
+
+Now compile the dev requirements and the ``requirements.txt`` file is used as
+a constraint:
+
+.. code-block:: bash
+
+ $ pip-compile dev-requirements.in
+ #
+ # This file is autogenerated by pip-compile
+ # To update, run:
+ #
+ # pip-compile dev-requirements.in
+ #
+ django-debug-toolbar==2.2 # via -r dev-requirements.in
+ django==2.1.15 # via -c requirements.txt, django-debug-toolbar
+ pytz==2019.3 # via -c requirements.txt, django
+ sqlparse==0.3.0 # via django-debug-toolbar
+
+As you can see above, even though a ``2.2`` release of Django is available, the
+dev requirements only include a ``2.1`` version of Django because they were
+constrained. Now both compiled requirements files can be installed safely in
+the dev environment.
+
+To install requirements in the production stage, use:
+
+.. code-block:: bash
+
+ $ pip-sync
+
+To install requirements in the development stage, use:
+
+.. code-block:: bash
+
+ $ pip-sync requirements.txt dev-requirements.txt
+
+
+Version control integration
+---------------------------
+
+You can use ``pip-compile`` as a hook for `pre-commit <https://github.com/pre-commit/pre-commit>`_.
+See the `pre-commit docs <https://pre-commit.com/>`_ for instructions.
+Sample ``.pre-commit-config.yaml``:
+
+.. code-block:: yaml
+
+ repos:
+ - repo: https://github.com/jazzband/pip-tools
+ rev: 5.0.0
+ hooks:
+ - id: pip-compile
+
+You might want to customize ``pip-compile`` args by configuring ``args`` and/or ``files``, for example:
+
+.. code-block:: yaml
+
+ repos:
+ - repo: https://github.com/jazzband/pip-tools
+ rev: 5.0.0
+ hooks:
+ - id: pip-compile
+ files: ^requirements/production\.(in|txt)$
+ args: [--index-url=https://example.com, requirements/production.in]
+
+
+Example usage for ``pip-sync``
+==============================
+
+Now that you have a ``requirements.txt``, you can use ``pip-sync`` to update
+your virtual environment to reflect exactly what's in there. This will
+install/upgrade/uninstall everything necessary to match the
+``requirements.txt`` contents.
+
+Run it with ``pip-sync`` or ``python -m piptools sync``. If you use multiple
+Python versions, you can also run ``py -X.Y -m piptools sync`` on Windows and
+``pythonX.Y -m piptools sync`` on other systems.
+
+``pip-sync`` must be installed into and run from the same virtual
+environment as your project to identify which packages to install
+or upgrade.
+
+**Be careful**: ``pip-sync`` is meant to be used only with a
+``requirements.txt`` generated by ``pip-compile``.
+
+.. code-block:: bash
+
+ $ pip-sync
+ Uninstalling flake8-2.4.1:
+ Successfully uninstalled flake8-2.4.1
+ Collecting click==4.1
+ Downloading click-4.1-py2.py3-none-any.whl (62kB)
+ 100% |................................| 65kB 1.8MB/s
+ Found existing installation: click 4.0
+ Uninstalling click-4.0:
+ Successfully uninstalled click-4.0
+ Successfully installed click-4.1
+
+To sync multiple ``*.txt`` dependency lists, just pass them in via command
+line arguments, e.g.
+
+.. code-block:: bash
+
+ $ pip-sync dev-requirements.txt requirements.txt
+
+If no files are passed in, ``pip-sync`` defaults to ``requirements.txt``.
+
+Any valid ``pip install`` flags or arguments may be passed with ``pip-sync``'s
+``--pip-args`` option, e.g.
+
+.. code-block:: bash
+
+ $ pip-sync requirements.txt --pip-args '--no-cache-dir --no-deps'
+
+If you use multiple Python versions, you can run ``pip-sync`` as
+``py -X.Y -m piptools sync ...`` on Windows and
+``pythonX.Y -m piptools sync ...`` on other systems.
+
+**Note**: ``pip-sync`` will not upgrade or uninstall packaging tools like
+``setuptools``, ``pip``, or ``pip-tools`` itself. Use ``python -m pip install --upgrade``
+to upgrade those packages.
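+
+For example, to bring those tools up to date yourself (a minimal sketch using the
+command mentioned above):
+
+.. code-block:: bash
+
+    $ python -m pip install --upgrade pip setuptools pip-tools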
+
+Should I commit ``requirements.in`` and ``requirements.txt`` to source control?
+===============================================================================
+
+Generally, yes. If you want a reproducible environment installation available from your source control,
+then yes, you should commit both ``requirements.in`` and ``requirements.txt`` to source control.
+
+Note that if you are deploying on multiple Python environments (read the section below),
+then you must commit a separate output file for each Python environment.
+We suggest using the ``{env}-requirements.txt`` format
+(e.g. ``win32-py2.7-requirements.txt``, ``macos-py3.6-requirements.txt``, etc.).
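+
+For example, using the ``--output-file`` option described earlier, each environment
+can compile its own pinned file following that naming convention (a sketch; the
+file name below is just the suggested format):
+
+.. code-block:: bash
+
+    $ pip-compile --output-file=macos-py3.6-requirements.txt requirements.in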
+
+
+Cross-environment usage of ``requirements.in``/``requirements.txt`` and ``pip-compile``
+=======================================================================================
+
+The dependencies of a package can change depending on the Python environment in which it
+is installed. Here, we define a Python environment as the combination of Operating
+System, Python version (2.7, 3.6, etc.), and Python implementation (CPython, PyPy,
+etc.). For an exact definition, refer to the possible combinations of `PEP 508
+environment markers`_.
+
+As the resulting ``requirements.txt`` can differ for each environment, users must
+execute ``pip-compile`` **on each Python environment separately** to generate a
+``requirements.txt`` valid for that environment. The same ``requirements.in`` can
+be used as the source file for all environments, using `PEP 508 environment markers`_ as
+needed, just as you would for regular cross-environment ``pip`` usage.
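+
+For example, a ``requirements.in`` entry can be guarded by an environment marker and
+then compiled once per target environment. The sketch below uses ``colorama`` purely
+as a hypothetical Windows-only requirement:
+
+.. code-block:: bash
+
+    $ cat requirements.in
+    colorama ; sys_platform == "win32"  # hypothetical marker-guarded requirement
+    $ pip-compile requirements.in  # repeat on each target Python environment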
+
+If the generated ``requirements.txt`` remains exactly the same for all Python
+environments, then it can be used across Python environments safely. **But** users
+should be careful, as any package update can introduce environment-dependent
+dependencies, making any newly generated ``requirements.txt`` environment-dependent too.
+As a general rule, users should still run ``pip-compile`` on each targeted Python
+environment to avoid issues.
+
+.. _PEP 508 environment markers: https://www.python.org/dev/peps/pep-0508/#environment-markers
+
+Other useful tools
+==================
+
+- `pipdeptree`_ to print the dependency tree of the installed packages.
+- ``requirements.in``/``requirements.txt`` syntax highlighting:
+
+ * `requirements.txt.vim`_ for Vim.
+ * `Python extension for VS Code`_ for VS Code.
+
+.. _pipdeptree: https://github.com/naiquevin/pipdeptree
+.. _requirements.txt.vim: https://github.com/raimon49/requirements.txt.vim
+.. _Python extension for VS Code: https://marketplace.visualstudio.com/items?itemName=ms-python.python
+
+
+Deprecations
+============
+
+This section lists ``pip-tools`` features that are currently deprecated.
+
+- ``--index/--no-index`` command-line options: use
+  ``--emit-index-url/--no-emit-index-url`` instead (since 5.2.0).
+
+Versions and compatibility
+==========================
+
+The table below summarizes the latest ``pip-tools`` versions and the ``pip``
+versions they require.
+
++-----------+-----------------+
+| pip-tools | pip |
++===========+=================+
+| 4.5.x | 8.1.3 - 20.0.x |
++-----------+-----------------+
+| 5.x | 20.0.x - 20.1.x |
++-----------+-----------------+
diff --git a/third_party/python/pip-tools/examples/django.in b/third_party/python/pip-tools/examples/django.in
new file mode 100644
index 0000000000..6b472eee4a
--- /dev/null
+++ b/third_party/python/pip-tools/examples/django.in
@@ -0,0 +1,3 @@
+# This file includes the Django project, and the debug toolbar
+Django<2.2.1 # suppose some version requirement
+django-debug-toolbar
diff --git a/third_party/python/pip-tools/examples/flask.in b/third_party/python/pip-tools/examples/flask.in
new file mode 100644
index 0000000000..8da22a8df9
--- /dev/null
+++ b/third_party/python/pip-tools/examples/flask.in
@@ -0,0 +1,2 @@
+# Flask has 2nd and 3rd level dependencies
+Flask
diff --git a/third_party/python/pip-tools/examples/hypothesis.in b/third_party/python/pip-tools/examples/hypothesis.in
new file mode 100644
index 0000000000..dcd4b2acd1
--- /dev/null
+++ b/third_party/python/pip-tools/examples/hypothesis.in
@@ -0,0 +1 @@
+hypothesis[django]
diff --git a/third_party/python/pip-tools/examples/protection.in b/third_party/python/pip-tools/examples/protection.in
new file mode 100644
index 0000000000..0080fa230e
--- /dev/null
+++ b/third_party/python/pip-tools/examples/protection.in
@@ -0,0 +1,3 @@
+# This package depends on setuptools, which should not end up in the compiled
+# requirements, because it may cause conflicts with pip itself
+python-levenshtein>=0.12.0
diff --git a/third_party/python/pip-tools/examples/sentry.in b/third_party/python/pip-tools/examples/sentry.in
new file mode 100644
index 0000000000..976bbdaab0
--- /dev/null
+++ b/third_party/python/pip-tools/examples/sentry.in
@@ -0,0 +1,2 @@
+# Sentry has a very large dependency tree
+sentry
diff --git a/third_party/python/pip-tools/img/pip-tools-overview.png b/third_party/python/pip-tools/img/pip-tools-overview.png
new file mode 100644
index 0000000000..a4849ad5e0
--- /dev/null
+++ b/third_party/python/pip-tools/img/pip-tools-overview.png
Binary files differ
diff --git a/third_party/python/pip-tools/piptools/__init__.py b/third_party/python/pip-tools/piptools/__init__.py
new file mode 100644
index 0000000000..9f0c95aa56
--- /dev/null
+++ b/third_party/python/pip-tools/piptools/__init__.py
@@ -0,0 +1,11 @@
+import locale
+
+from piptools.click import secho
+
+# Needed for locale.getpreferredencoding(False) to work
+# in pip._internal.utils.encoding.auto_decode
+try:
+ locale.setlocale(locale.LC_ALL, "")
+except locale.Error as e: # pragma: no cover
+ # setlocale can apparently crash if locales are uninitialized
+ secho("Ignoring error when setting locale: {}".format(e), fg="red")
diff --git a/third_party/python/pip-tools/piptools/__main__.py b/third_party/python/pip-tools/piptools/__main__.py
new file mode 100644
index 0000000000..2d8b75e85d
--- /dev/null
+++ b/third_party/python/pip-tools/piptools/__main__.py
@@ -0,0 +1,17 @@
+import click
+
+from piptools.scripts import compile, sync
+
+
+@click.group()
+def cli():
+ pass
+
+
+cli.add_command(compile.cli, "compile")
+cli.add_command(sync.cli, "sync")
+
+
+# Enable ``python -m piptools ...``.
+if __name__ == "__main__": # pragma: no branch
+ cli()
diff --git a/third_party/python/pip-tools/piptools/_compat/__init__.py b/third_party/python/pip-tools/piptools/_compat/__init__.py
new file mode 100644
index 0000000000..f67f0949ad
--- /dev/null
+++ b/third_party/python/pip-tools/piptools/_compat/__init__.py
@@ -0,0 +1,12 @@
+# coding: utf-8
+# flake8: noqa
+from __future__ import absolute_import, division, print_function, unicode_literals
+
+import six
+
+from .pip_compat import BAR_TYPES, PIP_VERSION, parse_requirements
+
+if six.PY2:
+ from .tempfile import TemporaryDirectory
+else:
+ from tempfile import TemporaryDirectory
diff --git a/third_party/python/pip-tools/piptools/_compat/contextlib.py b/third_party/python/pip-tools/piptools/_compat/contextlib.py
new file mode 100644
index 0000000000..04039ccb01
--- /dev/null
+++ b/third_party/python/pip-tools/piptools/_compat/contextlib.py
@@ -0,0 +1,18 @@
+# Ported from python 3.7 contextlib.py
+class nullcontext(object):
+ """Context manager that does no additional processing.
+ Used as a stand-in for a normal context manager, when a particular
+ block of code is only sometimes used with a normal context manager:
+ cm = optional_cm if condition else nullcontext()
+ with cm:
+ # Perform operation, using optional_cm if condition is True
+ """
+
+ def __init__(self, enter_result=None):
+ self.enter_result = enter_result
+
+ def __enter__(self):
+ return self.enter_result
+
+ def __exit__(self, *excinfo):
+ pass
diff --git a/third_party/python/pip-tools/piptools/_compat/pip_compat.py b/third_party/python/pip-tools/piptools/_compat/pip_compat.py
new file mode 100644
index 0000000000..543593ad9a
--- /dev/null
+++ b/third_party/python/pip-tools/piptools/_compat/pip_compat.py
@@ -0,0 +1,29 @@
+# -*- coding=utf-8 -*-
+from __future__ import absolute_import
+
+import pip
+from pip._internal.req import parse_requirements as _parse_requirements
+from pip._vendor.packaging.version import parse as parse_version
+
+PIP_VERSION = tuple(map(int, parse_version(pip.__version__).base_version.split(".")))
+
+
+if PIP_VERSION[:2] <= (20, 0):
+
+ def install_req_from_parsed_requirement(req, **kwargs):
+ return req
+
+ from pip._internal.utils.ui import BAR_TYPES
+
+else:
+ from pip._internal.req.constructors import install_req_from_parsed_requirement
+ from pip._internal.cli.progress_bars import BAR_TYPES
+
+
+def parse_requirements(
+ filename, session, finder=None, options=None, constraint=False, isolated=False
+):
+ for parsed_req in _parse_requirements(
+ filename, session, finder=finder, options=options, constraint=constraint
+ ):
+ yield install_req_from_parsed_requirement(parsed_req, isolated=isolated)
diff --git a/third_party/python/pip-tools/piptools/_compat/tempfile.py b/third_party/python/pip-tools/piptools/_compat/tempfile.py
new file mode 100644
index 0000000000..dc7e9ef997
--- /dev/null
+++ b/third_party/python/pip-tools/piptools/_compat/tempfile.py
@@ -0,0 +1,88 @@
+# coding: utf-8
+from __future__ import absolute_import, division, print_function
+
+import os as _os
+import sys as _sys
+import warnings as _warnings
+from tempfile import mkdtemp
+
+
+class TemporaryDirectory(object):
+ """Create and return a temporary directory. This has the same
+ behavior as mkdtemp but can be used as a context manager. For
+ example:
+
+ with TemporaryDirectory() as tmpdir:
+ ...
+
+ Upon exiting the context, the directory and everything contained
+ in it are removed.
+ """
+
+ def __init__(self, suffix="", prefix="tmp", dir=None):
+ self._closed = False
+ self.name = None # Handle mkdtemp raising an exception
+ self.name = mkdtemp(suffix, prefix, dir)
+
+ def __repr__(self):
+ return "<{} {!r}>".format(self.__class__.__name__, self.name)
+
+ def __enter__(self):
+ return self.name
+
+ def cleanup(self):
+ if self.name and not self._closed:
+ try:
+ self._rmtree(self.name)
+ except (TypeError, AttributeError) as ex:
+ # Issue #10188: Emit a warning on stderr
+ # if the directory could not be cleaned
+ # up due to missing globals
+ if "None" not in str(ex):
+ raise
+ print(
+ "ERROR: {!r} while cleaning up {!r}".format(ex, self),
+ file=_sys.stderr,
+ )
+ return
+ self._closed = True
+
+ def __exit__(self, exc, value, tb):
+ self.cleanup()
+
+ def __del__(self):
+ # Issue a ResourceWarning if implicit cleanup needed
+ self.cleanup()
+
+ # XXX (ncoghlan): The following code attempts to make
+ # this class tolerant of the module nulling out process
+ # that happens during CPython interpreter shutdown
+ # Alas, it doesn't actually manage it. See issue #10188
+ _listdir = staticmethod(_os.listdir)
+ _path_join = staticmethod(_os.path.join)
+ _isdir = staticmethod(_os.path.isdir)
+ _islink = staticmethod(_os.path.islink)
+ _remove = staticmethod(_os.remove)
+ _rmdir = staticmethod(_os.rmdir)
+ _warn = _warnings.warn
+
+ def _rmtree(self, path):
+ # Essentially a stripped down version of shutil.rmtree. We can't
+ # use globals because they may be None'ed out at shutdown.
+ for name in self._listdir(path):
+ fullname = self._path_join(path, name)
+ try:
+ isdir = self._isdir(fullname) and not self._islink(fullname)
+ except OSError:
+ isdir = False
+ if isdir:
+ self._rmtree(fullname)
+ else:
+ try:
+ self._remove(fullname)
+ except OSError:
+ pass
+ try:
+ self._rmdir(path)
+ except OSError:
+ pass
diff --git a/third_party/python/pip-tools/piptools/cache.py b/third_party/python/pip-tools/piptools/cache.py
new file mode 100644
index 0000000000..9b6bf550cc
--- /dev/null
+++ b/third_party/python/pip-tools/piptools/cache.py
@@ -0,0 +1,170 @@
+# coding: utf-8
+from __future__ import absolute_import, division, print_function, unicode_literals
+
+import json
+import os
+import platform
+import sys
+
+from pip._vendor.packaging.requirements import Requirement
+
+from .exceptions import PipToolsError
+from .utils import as_tuple, key_from_req, lookup_table
+
+_PEP425_PY_TAGS = {"cpython": "cp", "pypy": "pp", "ironpython": "ip", "jython": "jy"}
+
+
+def _implementation_name():
+ """Similar to PEP 425, but the minor version is separated from the
+ major to differentiate "3.10" and "31.0".
+ """
+ implementation_name = platform.python_implementation().lower()
+ implementation = _PEP425_PY_TAGS.get(implementation_name, "??")
+ return "{}{}.{}".format(implementation, *sys.version_info)
+
+
+class CorruptCacheError(PipToolsError):
+ def __init__(self, path):
+ self.path = path
+
+ def __str__(self):
+ lines = [
+ "The dependency cache seems to have been corrupted.",
+ "Inspect, or delete, the following file:",
+ " {}".format(self.path),
+ ]
+ return os.linesep.join(lines)
+
+
+def read_cache_file(cache_file_path):
+ with open(cache_file_path, "r") as cache_file:
+ try:
+ doc = json.load(cache_file)
+ except ValueError:
+ raise CorruptCacheError(cache_file_path)
+
+ # Check version and load the contents
+ if doc["__format__"] != 1:
+ raise AssertionError("Unknown cache file format")
+ return doc["dependencies"]
+
+
+class DependencyCache(object):
+ """
+ Creates a new persistent dependency cache for the current Python version.
+ The cache file is written to the appropriate user cache dir for the
+ current platform, i.e.
+
+ ~/.cache/pip-tools/depcache-pyX.Y.json
+
+ Where py indicates the Python implementation.
+ Where X.Y indicates the Python version.
+ """
+
+ def __init__(self, cache_dir):
+ if not os.path.isdir(cache_dir):
+ os.makedirs(cache_dir)
+ cache_filename = "depcache-{}.json".format(_implementation_name())
+
+ self._cache_file = os.path.join(cache_dir, cache_filename)
+ self._cache = None
+
+ @property
+ def cache(self):
+ """
+ The dictionary that is the actual in-memory cache. This property
+ lazily loads the cache from disk.
+ """
+ if self._cache is None:
+ self.read_cache()
+ return self._cache
+
+ def as_cache_key(self, ireq):
+ """
+ Given a requirement, return its cache key. This behavior is a little weird
+ in order to allow backwards compatibility with cache files. For a requirement
+ without extras, this will return, for example:
+
+ ("ipython", "2.1.0")
+
+ For a requirement with extras, the extras will be comma-separated and appended
+ to the version, inside brackets, like so:
+
+ ("ipython", "2.1.0[nbconvert,notebook]")
+ """
+ name, version, extras = as_tuple(ireq)
+ if not extras:
+ extras_string = ""
+ else:
+ extras_string = "[{}]".format(",".join(extras))
+ return name, "{}{}".format(version, extras_string)
+
+ def read_cache(self):
+ """Reads the cached contents into memory."""
+ if os.path.exists(self._cache_file):
+ self._cache = read_cache_file(self._cache_file)
+ else:
+ self._cache = {}
+
+ def write_cache(self):
+ """Writes the cache to disk as JSON."""
+ doc = {"__format__": 1, "dependencies": self._cache}
+ with open(self._cache_file, "w") as f:
+ json.dump(doc, f, sort_keys=True)
+
+ def clear(self):
+ self._cache = {}
+ self.write_cache()
+
+ def __contains__(self, ireq):
+ pkgname, pkgversion_and_extras = self.as_cache_key(ireq)
+ return pkgversion_and_extras in self.cache.get(pkgname, {})
+
+ def __getitem__(self, ireq):
+ pkgname, pkgversion_and_extras = self.as_cache_key(ireq)
+ return self.cache[pkgname][pkgversion_and_extras]
+
+ def __setitem__(self, ireq, values):
+ pkgname, pkgversion_and_extras = self.as_cache_key(ireq)
+ self.cache.setdefault(pkgname, {})
+ self.cache[pkgname][pkgversion_and_extras] = values
+ self.write_cache()
+
+ def reverse_dependencies(self, ireqs):
+ """
+ Returns a lookup table of reverse dependencies for all the given ireqs.
+
+ Since this is all static, it only works if the dependency cache
+ contains the complete data, otherwise you end up with a partial view.
+ This is typically no problem if you use this function after the entire
+ dependency tree is resolved.
+ """
+ ireqs_as_cache_values = [self.as_cache_key(ireq) for ireq in ireqs]
+ return self._reverse_dependencies(ireqs_as_cache_values)
+
+ def _reverse_dependencies(self, cache_keys):
+ """
+ Returns a lookup table of reverse dependencies for all the given cache keys.
+
+ Example input:
+
+ [('pep8', '1.5.7'),
+ ('flake8', '2.4.0'),
+ ('mccabe', '0.3'),
+ ('pyflakes', '0.8.1')]
+
+ Example output:
+
+ {'pep8': ['flake8'],
+ 'flake8': [],
+ 'mccabe': ['flake8'],
+ 'pyflakes': ['flake8']}
+
+ """
+ # First, collect all the dependencies into a sequence of (parent, child)
+ # tuples, like [('flake8', 'pep8'), ('flake8', 'mccabe'), ...]
+ return lookup_table(
+ (key_from_req(Requirement(dep_name)), name)
+ for name, version_and_extras in cache_keys
+ for dep_name in self.cache[name][version_and_extras]
+ )
diff --git a/third_party/python/pip-tools/piptools/click.py b/third_party/python/pip-tools/piptools/click.py
new file mode 100644
index 0000000000..86f1612c6a
--- /dev/null
+++ b/third_party/python/pip-tools/piptools/click.py
@@ -0,0 +1,6 @@
+from __future__ import absolute_import
+
+import click
+from click import * # noqa
+
+click.disable_unicode_literals_warning = True
diff --git a/third_party/python/pip-tools/piptools/exceptions.py b/third_party/python/pip-tools/piptools/exceptions.py
new file mode 100644
index 0000000000..5278972741
--- /dev/null
+++ b/third_party/python/pip-tools/piptools/exceptions.py
@@ -0,0 +1,66 @@
+from pip._internal.utils.misc import redact_auth_from_url
+
+
+class PipToolsError(Exception):
+ pass
+
+
+class NoCandidateFound(PipToolsError):
+ def __init__(self, ireq, candidates_tried, finder):
+ self.ireq = ireq
+ self.candidates_tried = candidates_tried
+ self.finder = finder
+
+ def __str__(self):
+ versions = []
+ pre_versions = []
+
+ for candidate in sorted(self.candidates_tried):
+ version = str(candidate.version)
+ if candidate.version.is_prerelease:
+ pre_versions.append(version)
+ else:
+ versions.append(version)
+
+ lines = ["Could not find a version that matches {}".format(self.ireq)]
+
+ if versions:
+ lines.append("Tried: {}".format(", ".join(versions)))
+
+ if pre_versions:
+ if self.finder.allow_all_prereleases:
+ line = "Tried"
+ else:
+ line = "Skipped"
+
+ line += " pre-versions: {}".format(", ".join(pre_versions))
+ lines.append(line)
+
+ if versions or pre_versions:
+ lines.append(
+ "There are incompatible versions in the resolved dependencies:"
+ )
+ source_ireqs = getattr(self.ireq, "_source_ireqs", [])
+ lines.extend(" {}".format(ireq) for ireq in source_ireqs)
+ else:
+ redacted_urls = tuple(
+ redact_auth_from_url(url) for url in self.finder.index_urls
+ )
+ lines.append("No versions found")
+ lines.append(
+ "{} {} reachable?".format(
+ "Were" if len(redacted_urls) > 1 else "Was",
+ " or ".join(redacted_urls),
+ )
+ )
+ return "\n".join(lines)
+
+
+class IncompatibleRequirements(PipToolsError):
+ def __init__(self, ireq_a, ireq_b):
+ self.ireq_a = ireq_a
+ self.ireq_b = ireq_b
+
+ def __str__(self):
+ message = "Incompatible requirements found: {} and {}"
+ return message.format(self.ireq_a, self.ireq_b)
diff --git a/third_party/python/pip-tools/piptools/locations.py b/third_party/python/pip-tools/piptools/locations.py
new file mode 100644
index 0000000000..9ca0ffe436
--- /dev/null
+++ b/third_party/python/pip-tools/piptools/locations.py
@@ -0,0 +1,25 @@
+import os
+from shutil import rmtree
+
+from pip._internal.utils.appdirs import user_cache_dir
+
+from .click import secho
+
+# The user_cache_dir helper comes straight from pip itself
+CACHE_DIR = user_cache_dir("pip-tools")
+
+# NOTE
+# We used to store the cache dir under ~/.pip-tools, which is not the
+# preferred place to store caches for any platform. This has been addressed
+# in pip-tools==1.0.5, but to be good citizens, we point this out explicitly
+# to the user when this directory is still found.
+LEGACY_CACHE_DIR = os.path.expanduser("~/.pip-tools")
+
+if os.path.exists(LEGACY_CACHE_DIR):
+ secho(
+ "Removing old cache dir {} (new cache dir is {})".format(
+ LEGACY_CACHE_DIR, CACHE_DIR
+ ),
+ fg="yellow",
+ )
+ rmtree(LEGACY_CACHE_DIR)
diff --git a/third_party/python/pip-tools/piptools/logging.py b/third_party/python/pip-tools/piptools/logging.py
new file mode 100644
index 0000000000..dcf068f7a2
--- /dev/null
+++ b/third_party/python/pip-tools/piptools/logging.py
@@ -0,0 +1,62 @@
+# coding: utf-8
+from __future__ import absolute_import, division, print_function, unicode_literals
+
+import contextlib
+import logging
+import sys
+
+from . import click
+
+# Initialise the builtin logging module for other components using it.
+# Ex: pip
+logging.basicConfig()
+
+
+class LogContext(object):
+ stream = sys.stderr
+
+ def __init__(self, verbosity=0, indent_width=2):
+ self.verbosity = verbosity
+ self.current_indent = 0
+ self._indent_width = indent_width
+
+ def log(self, message, *args, **kwargs):
+ kwargs.setdefault("err", True)
+ prefix = " " * self.current_indent
+ click.secho(prefix + message, *args, **kwargs)
+
+ def debug(self, *args, **kwargs):
+ if self.verbosity >= 1:
+ self.log(*args, **kwargs)
+
+ def info(self, *args, **kwargs):
+ if self.verbosity >= 0:
+ self.log(*args, **kwargs)
+
+ def warning(self, *args, **kwargs):
+ kwargs.setdefault("fg", "yellow")
+ self.log(*args, **kwargs)
+
+ def error(self, *args, **kwargs):
+ kwargs.setdefault("fg", "red")
+ self.log(*args, **kwargs)
+
+ def _indent(self):
+ self.current_indent += self._indent_width
+
+ def _dedent(self):
+ self.current_indent -= self._indent_width
+
+ @contextlib.contextmanager
+ def indentation(self):
+ """
+ Increase indentation.
+ """
+ self._indent()
+ try:
+ yield
+ finally:
+ self._dedent()
+
+
+log = LogContext()
diff --git a/third_party/python/pip-tools/piptools/repositories/__init__.py b/third_party/python/pip-tools/piptools/repositories/__init__.py
new file mode 100644
index 0000000000..ce5142e8c6
--- /dev/null
+++ b/third_party/python/pip-tools/piptools/repositories/__init__.py
@@ -0,0 +1,3 @@
+# flake8: noqa
+from .local import LocalRequirementsRepository
+from .pypi import PyPIRepository
diff --git a/third_party/python/pip-tools/piptools/repositories/base.py b/third_party/python/pip-tools/piptools/repositories/base.py
new file mode 100644
index 0000000000..0343fe7d79
--- /dev/null
+++ b/third_party/python/pip-tools/piptools/repositories/base.py
@@ -0,0 +1,55 @@
+# coding: utf-8
+from __future__ import absolute_import, division, print_function, unicode_literals
+
+from abc import ABCMeta, abstractmethod
+from contextlib import contextmanager
+
+from six import add_metaclass
+
+
+@add_metaclass(ABCMeta)
+class BaseRepository(object):
+ def clear_caches(self):
+ """Should clear any caches used by the implementation."""
+
+ def freshen_build_caches(self):
+ """Should start with fresh build/source caches."""
+
+ @abstractmethod
+ def find_best_match(self, ireq):
+ """
+ Return a Version object that indicates the best match for the given
+ InstallRequirement according to the repository.
+ """
+
+ @abstractmethod
+ def get_dependencies(self, ireq):
+ """
+ Given a pinned, URL, or editable InstallRequirement, returns a set of
+ dependencies (also InstallRequirements, but not necessarily pinned).
+ They indicate the secondary dependencies for the given requirement.
+ """
+
+ @abstractmethod
+ def get_hashes(self, ireq):
+ """
+ Given a pinned InstallRequirement, returns a set of hashes that represent
+ all of the files for a given requirement. It is not acceptable for an
+ editable or unpinned requirement to be passed to this function.
+ """
+
+ @abstractmethod
+ @contextmanager
+ def allow_all_wheels(self):
+ """
+ Monkey patches pip.Wheel to allow wheels from all platforms and Python versions.
+ """
+
+ @abstractmethod
+ def copy_ireq_dependencies(self, source, dest):
+ """
+ Notifies the repository that `dest` is a copy of `source`, and so it
+ has the same dependencies. Otherwise, once we prepare an ireq to assign
+ it its name, we would lose track of those dependencies on combining
+ that ireq with others.
+ """
diff --git a/third_party/python/pip-tools/piptools/repositories/local.py b/third_party/python/pip-tools/piptools/repositories/local.py
new file mode 100644
index 0000000000..6c91d1b4f2
--- /dev/null
+++ b/third_party/python/pip-tools/piptools/repositories/local.py
@@ -0,0 +1,99 @@
+# coding: utf-8
+from __future__ import absolute_import, division, print_function, unicode_literals
+
+from contextlib import contextmanager
+
+from pip._internal.utils.hashes import FAVORITE_HASH
+
+from .._compat import PIP_VERSION
+from .base import BaseRepository
+
+from piptools.utils import as_tuple, key_from_ireq, make_install_requirement
+
+
+def ireq_satisfied_by_existing_pin(ireq, existing_pin):
+ """
+ Return True if the given InstallationRequirement is satisfied by the
+ previously encountered version pin.
+ """
+ version = next(iter(existing_pin.req.specifier)).version
+ return ireq.req.specifier.contains(
+ version, prereleases=existing_pin.req.specifier.prereleases
+ )
+
+
+class LocalRequirementsRepository(BaseRepository):
+ """
+ The LocalRequirementsRepository proxies the _real_ repository by first
+ checking if a requirement can be satisfied by existing pins (i.e. the
+ result of a previous compile step).
+
+ In effect, if a requirement can be satisfied with a version pinned in the
+ requirements file, we prefer that version over the best match found in
+ PyPI. This keeps updates to the requirements.txt down to a minimum.
+ """
+
+ def __init__(self, existing_pins, proxied_repository, reuse_hashes=True):
+ self._reuse_hashes = reuse_hashes
+ self.repository = proxied_repository
+ self.existing_pins = existing_pins
+
+ @property
+ def options(self):
+ return self.repository.options
+
+ @property
+ def finder(self):
+ return self.repository.finder
+
+ @property
+ def session(self):
+ return self.repository.session
+
+ @property
+ def DEFAULT_INDEX_URL(self):
+ return self.repository.DEFAULT_INDEX_URL
+
+ def clear_caches(self):
+ self.repository.clear_caches()
+
+ def freshen_build_caches(self):
+ self.repository.freshen_build_caches()
+
+ def find_best_match(self, ireq, prereleases=None):
+ key = key_from_ireq(ireq)
+ existing_pin = self.existing_pins.get(key)
+ if existing_pin and ireq_satisfied_by_existing_pin(ireq, existing_pin):
+ project, version, _ = as_tuple(existing_pin)
+ return make_install_requirement(
+ project, version, ireq.extras, constraint=ireq.constraint
+ )
+ else:
+ return self.repository.find_best_match(ireq, prereleases)
+
+ def get_dependencies(self, ireq):
+ return self.repository.get_dependencies(ireq)
+
+ def get_hashes(self, ireq):
+ existing_pin = self._reuse_hashes and self.existing_pins.get(
+ key_from_ireq(ireq)
+ )
+ if existing_pin and ireq_satisfied_by_existing_pin(ireq, existing_pin):
+ if PIP_VERSION[:2] <= (20, 0):
+ hashes = existing_pin.options.get("hashes", {})
+ else:
+ hashes = existing_pin.hash_options
+ hexdigests = hashes.get(FAVORITE_HASH)
+ if hexdigests:
+ return {
+ ":".join([FAVORITE_HASH, hexdigest]) for hexdigest in hexdigests
+ }
+ return self.repository.get_hashes(ireq)
+
+ @contextmanager
+ def allow_all_wheels(self):
+ with self.repository.allow_all_wheels():
+ yield
+
+ def copy_ireq_dependencies(self, source, dest):
+ self.repository.copy_ireq_dependencies(source, dest)
diff --git a/third_party/python/pip-tools/piptools/repositories/pypi.py b/third_party/python/pip-tools/piptools/repositories/pypi.py
new file mode 100644
index 0000000000..7480b5e855
--- /dev/null
+++ b/third_party/python/pip-tools/piptools/repositories/pypi.py
@@ -0,0 +1,524 @@
+# coding: utf-8
+from __future__ import absolute_import, division, print_function, unicode_literals
+
+import collections
+import hashlib
+import itertools
+import logging
+import os
+from contextlib import contextmanager
+from shutil import rmtree
+
+from pip._internal.cache import WheelCache
+from pip._internal.commands import create_command
+from pip._internal.models.index import PackageIndex, PyPI
+from pip._internal.models.link import Link
+from pip._internal.models.wheel import Wheel
+from pip._internal.req import RequirementSet
+from pip._internal.req.req_tracker import get_requirement_tracker
+from pip._internal.utils.hashes import FAVORITE_HASH
+from pip._internal.utils.logging import indent_log, setup_logging
+from pip._internal.utils.misc import normalize_path
+from pip._internal.utils.temp_dir import TempDirectory, global_tempdir_manager
+from pip._internal.utils.urls import path_to_url, url_to_path
+from pip._vendor.requests import RequestException
+
+from .._compat import BAR_TYPES, PIP_VERSION, TemporaryDirectory, contextlib
+from ..click import progressbar
+from ..exceptions import NoCandidateFound
+from ..logging import log
+from ..utils import (
+ as_tuple,
+ fs_str,
+ is_pinned_requirement,
+ is_url_requirement,
+ lookup_table,
+ make_install_requirement,
+)
+from .base import BaseRepository
+
+FILE_CHUNK_SIZE = 4096
+FileStream = collections.namedtuple("FileStream", "stream size")
+
+
+class PyPIRepository(BaseRepository):
+ DEFAULT_INDEX_URL = PyPI.simple_url
+ HASHABLE_PACKAGE_TYPES = {"bdist_wheel", "sdist"}
+
+ """
+ The PyPIRepository will use the provided Finder instance to look up
+ packages. Typically, it looks up packages on PyPI (the default implicit
+ config), but any other PyPI mirror can be used if index_urls is
+ changed/configured on the Finder.
+ """
+
+ def __init__(self, pip_args, cache_dir):
+ # Use pip's parser for pip.conf management and defaults.
+ # General options (find_links, index_url, extra_index_url, trusted_host,
+ # and pre) are deferred to pip.
+ self.command = create_command("install")
+ self.options, _ = self.command.parse_args(pip_args)
+ if self.options.cache_dir:
+ self.options.cache_dir = normalize_path(self.options.cache_dir)
+
+ self.options.require_hashes = False
+ self.options.ignore_dependencies = False
+
+ self.session = self.command._build_session(self.options)
+ self.finder = self.command._build_package_finder(
+ options=self.options, session=self.session
+ )
+
+ # Caches
+ # stores project_name => InstallationCandidate mappings for all
+ # versions reported by PyPI, so we only have to ask once for each
+ # project
+ self._available_candidates_cache = {}
+
+ # stores InstallRequirement => list(InstallRequirement) mappings
+ # of all secondary dependencies for the given requirement, so we
+ # only have to go to disk once for each requirement
+ self._dependencies_cache = {}
+
+ # Setup file paths
+ self.freshen_build_caches()
+ self._cache_dir = normalize_path(cache_dir)
+ self._download_dir = fs_str(os.path.join(self._cache_dir, "pkgs"))
+ self._wheel_download_dir = fs_str(os.path.join(self._cache_dir, "wheels"))
+
+ self._setup_logging()
+
+ def freshen_build_caches(self):
+ """
+ Start with fresh build/source caches. Will remove any old build
+ caches from disk automatically.
+ """
+ self._build_dir = TemporaryDirectory(fs_str("build"))
+ self._source_dir = TemporaryDirectory(fs_str("source"))
+
+ @property
+ def build_dir(self):
+ return self._build_dir.name
+
+ @property
+ def source_dir(self):
+ return self._source_dir.name
+
+ def clear_caches(self):
+ rmtree(self._download_dir, ignore_errors=True)
+ rmtree(self._wheel_download_dir, ignore_errors=True)
+
+ def find_all_candidates(self, req_name):
+ if req_name not in self._available_candidates_cache:
+ candidates = self.finder.find_all_candidates(req_name)
+ self._available_candidates_cache[req_name] = candidates
+ return self._available_candidates_cache[req_name]
+
+ def find_best_match(self, ireq, prereleases=None):
+ """
+ Returns a Version object that indicates the best match for the given
+ InstallRequirement according to the external repository.
+ """
+ if ireq.editable or is_url_requirement(ireq):
+ return ireq # return itself as the best match
+
+ all_candidates = self.find_all_candidates(ireq.name)
+ candidates_by_version = lookup_table(all_candidates, key=lambda c: c.version)
+ matching_versions = ireq.specifier.filter(
+ (candidate.version for candidate in all_candidates), prereleases=prereleases
+ )
+
+ matching_candidates = list(
+ itertools.chain.from_iterable(
+ candidates_by_version[ver] for ver in matching_versions
+ )
+ )
+ if not matching_candidates:
+ raise NoCandidateFound(ireq, all_candidates, self.finder)
+
+ evaluator = self.finder.make_candidate_evaluator(ireq.name)
+ best_candidate_result = evaluator.compute_best_candidate(matching_candidates)
+ best_candidate = best_candidate_result.best_candidate
+
+ # Turn the candidate into a pinned InstallRequirement
+ return make_install_requirement(
+ best_candidate.name,
+ best_candidate.version,
+ ireq.extras,
+ constraint=ireq.constraint,
+ )
+
+ def resolve_reqs(self, download_dir, ireq, wheel_cache):
+ with get_requirement_tracker() as req_tracker, TempDirectory(
+ kind="resolver"
+ ) as temp_dir, indent_log():
+ preparer = self.command.make_requirement_preparer(
+ temp_build_dir=temp_dir,
+ options=self.options,
+ req_tracker=req_tracker,
+ session=self.session,
+ finder=self.finder,
+ use_user_site=False,
+ download_dir=download_dir,
+ wheel_download_dir=self._wheel_download_dir,
+ )
+
+ reqset = RequirementSet()
+ if PIP_VERSION[:2] <= (20, 1):
+ ireq.is_direct = True
+ else:
+ ireq.user_supplied = True
+ reqset.add_requirement(ireq)
+
+ resolver = self.command.make_resolver(
+ preparer=preparer,
+ finder=self.finder,
+ options=self.options,
+ wheel_cache=wheel_cache,
+ use_user_site=False,
+ ignore_installed=True,
+ ignore_requires_python=False,
+ force_reinstall=False,
+ upgrade_strategy="to-satisfy-only",
+ )
+ results = resolver._resolve_one(reqset, ireq)
+ if not ireq.prepared:
+ # If still not prepared, e.g. a constraint, do enough to assign
+ # the ireq a name:
+ resolver._get_abstract_dist_for(ireq)
+
+ if PIP_VERSION[:2] <= (20, 0):
+ reqset.cleanup_files()
+
+ return set(results)
+
+ def get_dependencies(self, ireq):
+ """
+ Given a pinned, URL, or editable InstallRequirement, returns a set of
+ dependencies (also InstallRequirements, but not necessarily pinned).
+ They indicate the secondary dependencies for the given requirement.
+ """
+ if not (
+ ireq.editable or is_url_requirement(ireq) or is_pinned_requirement(ireq)
+ ):
+ raise TypeError(
+ "Expected url, pinned or editable InstallRequirement, got {}".format(
+ ireq
+ )
+ )
+
+ if ireq not in self._dependencies_cache:
+ if ireq.editable and (ireq.source_dir and os.path.exists(ireq.source_dir)):
+ # No download_dir for locally available editable requirements.
+                # If a download_dir is passed, pip will unnecessarily
+                # archive the entire source directory.
+ download_dir = None
+ elif ireq.link and ireq.link.is_vcs:
+ # No download_dir for VCS sources. This also works around pip
+ # using git-checkout-index, which gets rid of the .git dir.
+ download_dir = None
+ else:
+ download_dir = self._get_download_path(ireq)
+ if not os.path.isdir(download_dir):
+ os.makedirs(download_dir)
+ if not os.path.isdir(self._wheel_download_dir):
+ os.makedirs(self._wheel_download_dir)
+
+ with global_tempdir_manager():
+ wheel_cache = WheelCache(self._cache_dir, self.options.format_control)
+ prev_tracker = os.environ.get("PIP_REQ_TRACKER")
+ try:
+ self._dependencies_cache[ireq] = self.resolve_reqs(
+ download_dir, ireq, wheel_cache
+ )
+ finally:
+ if "PIP_REQ_TRACKER" in os.environ:
+ if prev_tracker:
+ os.environ["PIP_REQ_TRACKER"] = prev_tracker
+ else:
+ del os.environ["PIP_REQ_TRACKER"]
+
+ if PIP_VERSION[:2] <= (20, 0):
+ wheel_cache.cleanup()
+
+ return self._dependencies_cache[ireq]
+
+ def copy_ireq_dependencies(self, source, dest):
+ try:
+ self._dependencies_cache[dest] = self._dependencies_cache[source]
+ except KeyError:
+ # `source` may not be in cache yet.
+ pass
+
+ def _get_project(self, ireq):
+ """
+        Return a dict of project info from the PyPI JSON API for a given
+        InstallRequirement. Return None on an HTTP/JSON error or if the
+        package is not found on the PyPI server.
+
+ API reference: https://warehouse.readthedocs.io/api-reference/json/
+ """
+ package_indexes = (
+ PackageIndex(url=index_url, file_storage_domain="")
+ for index_url in self.finder.search_scope.index_urls
+ )
+ for package_index in package_indexes:
+ url = "{url}/{name}/json".format(url=package_index.pypi_url, name=ireq.name)
+ try:
+ response = self.session.get(url)
+ except RequestException as e:
+ log.debug(
+ "Fetch package info from PyPI failed: {url}: {e}".format(
+ url=url, e=e
+ )
+ )
+ continue
+
+            # Skip this PyPI server, because there is no such package
+            # or the JSON API might not be supported
+ if response.status_code == 404:
+ continue
+
+ try:
+ data = response.json()
+ except ValueError as e:
+ log.debug(
+ "Cannot parse JSON response from PyPI: {url}: {e}".format(
+ url=url, e=e
+ )
+ )
+ continue
+ return data
+ return None
+
+ def _get_download_path(self, ireq):
+ """
+ Determine the download dir location in a way which avoids name
+ collisions.
+ """
+ if ireq.link:
+ salt = hashlib.sha224(ireq.link.url_without_fragment.encode()).hexdigest()
+ # Nest directories to avoid running out of top level dirs on some FS
+ # (see pypi _get_cache_path_parts, which inspired this)
+ salt = [salt[:2], salt[2:4], salt[4:6], salt[6:]]
+ return os.path.join(self._download_dir, *salt)
+ else:
+ return self._download_dir
+
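+    # A standalone sketch of the salt nesting above (hypothetical URL and cache dir):
+    #
+    #     import hashlib, os
+    #
+    #     salt = hashlib.sha224(b"https://example.com/pkg-1.0.tar.gz").hexdigest()
+    #     parts = [salt[:2], salt[2:4], salt[4:6], salt[6:]]
+    #     os.path.join("/cache/pkgs", *parts)
+    #     # -> "/cache/pkgs/xx/yy/zz/<remaining 50 hex chars>"
+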
+ def get_hashes(self, ireq):
+ """
+ Given an InstallRequirement, return a set of hashes that represent all
+ of the files for a given requirement. Unhashable requirements return an
+ empty set. Unpinned requirements raise a TypeError.
+ """
+
+ if ireq.link:
+ link = ireq.link
+
+ if link.is_vcs or (link.is_file and link.is_existing_dir()):
+ # Return empty set for unhashable requirements.
+ # Unhashable logic modeled on pip's
+ # RequirementPreparer.prepare_linked_requirement
+ return set()
+
+ if is_url_requirement(ireq):
+ # Directly hash URL requirements.
+ # URL requirements may have been previously downloaded and cached
+ # locally by self.resolve_reqs()
+ cached_path = os.path.join(self._get_download_path(ireq), link.filename)
+ if os.path.exists(cached_path):
+ cached_link = Link(path_to_url(cached_path))
+ else:
+ cached_link = link
+ return {self._get_file_hash(cached_link)}
+
+ if not is_pinned_requirement(ireq):
+ raise TypeError("Expected pinned requirement, got {}".format(ireq))
+
+ log.debug("{}".format(ireq.name))
+
+ with log.indentation():
+ hashes = self._get_hashes_from_pypi(ireq)
+ if hashes is None:
+                log.log("Couldn't get hashes from PyPI, falling back to hashing files")
+ return self._get_hashes_from_files(ireq)
+
+ return hashes
+
+ def _get_hashes_from_pypi(self, ireq):
+ """
+        Return a set of hashes from the PyPI JSON API for a given InstallRequirement.
+        Return None if fetching the data failed or the digests are missing.
+ """
+ project = self._get_project(ireq)
+ if project is None:
+ return None
+
+ _, version, _ = as_tuple(ireq)
+
+ try:
+ release_files = project["releases"][version]
+ except KeyError:
+ log.debug("Missing release files on PyPI")
+ return None
+
+ try:
+ hashes = {
+ "{algo}:{digest}".format(
+ algo=FAVORITE_HASH, digest=file_["digests"][FAVORITE_HASH]
+ )
+ for file_ in release_files
+ if file_["packagetype"] in self.HASHABLE_PACKAGE_TYPES
+ }
+ except KeyError:
+ log.debug("Missing digests of release files on PyPI")
+ return None
+
+ return hashes
+
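+    # Shape sketch of the JSON consumed by the comprehension above (hypothetical
+    # digests, assuming FAVORITE_HASH == "sha256"):
+    #
+    #     release_files = [
+    #         {"packagetype": "bdist_wheel", "digests": {"sha256": "aaa111"}},
+    #         {"packagetype": "sdist", "digests": {"sha256": "bbb222"}},
+    #     ]
+    #     {"sha256:" + f["digests"]["sha256"] for f in release_files}
+    #     # -> {"sha256:aaa111", "sha256:bbb222"}
+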
+ def _get_hashes_from_files(self, ireq):
+ """
+ Return a set of hashes for all release files of a given InstallRequirement.
+ """
+ # We need to get all of the candidates that match our current version
+ # pin, these will represent all of the files that could possibly
+ # satisfy this constraint.
+ all_candidates = self.find_all_candidates(ireq.name)
+ candidates_by_version = lookup_table(all_candidates, key=lambda c: c.version)
+ matching_versions = list(
+ ireq.specifier.filter((candidate.version for candidate in all_candidates))
+ )
+ matching_candidates = candidates_by_version[matching_versions[0]]
+
+ return {
+ self._get_file_hash(candidate.link) for candidate in matching_candidates
+ }
+
+ def _get_file_hash(self, link):
+ log.debug("Hashing {}".format(link.show_url))
+ h = hashlib.new(FAVORITE_HASH)
+ with open_local_or_remote_file(link, self.session) as f:
+ # Chunks to iterate
+ chunks = iter(lambda: f.stream.read(FILE_CHUNK_SIZE), b"")
+
+ # Choose a context manager depending on verbosity
+ if log.verbosity >= 1:
+ iter_length = f.size / FILE_CHUNK_SIZE if f.size else None
+ bar_template = "{prefix} |%(bar)s| %(info)s".format(
+ prefix=" " * log.current_indent
+ )
+ context_manager = progressbar(
+ chunks,
+ length=iter_length,
+ # Make it look like default pip progress bar
+ fill_char="â–ˆ",
+ empty_char=" ",
+ bar_template=bar_template,
+ width=32,
+ )
+ else:
+ context_manager = contextlib.nullcontext(chunks)
+
+ # Iterate over the chosen context manager
+ with context_manager as bar:
+ for chunk in bar:
+ h.update(chunk)
+ return ":".join([FAVORITE_HASH, h.hexdigest()])
+
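+    # A chunked-hashing sketch equivalent to the loop above, for a local file
+    # (hypothetical path, assuming FAVORITE_HASH == "sha256"):
+    #
+    #     import hashlib
+    #
+    #     h = hashlib.new("sha256")
+    #     with open("example-1.0-py3-none-any.whl", "rb") as f:
+    #         for chunk in iter(lambda: f.read(4096), b""):
+    #             h.update(chunk)
+    #     ":".join(["sha256", h.hexdigest()])
+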
+ @contextmanager
+ def allow_all_wheels(self):
+ """
+ Monkey patches pip.Wheel to allow wheels from all platforms and Python versions.
+
+        This also saves the candidate cache and sets a new one, or else the results
+        from the previous non-patched calls will interfere.
+ """
+
+ def _wheel_supported(self, tags=None):
+ # Ignore current platform. Support everything.
+ return True
+
+ def _wheel_support_index_min(self, tags=None):
+ # All wheels are equal priority for sorting.
+ return 0
+
+ original_wheel_supported = Wheel.supported
+ original_support_index_min = Wheel.support_index_min
+ original_cache = self._available_candidates_cache
+
+ Wheel.supported = _wheel_supported
+ Wheel.support_index_min = _wheel_support_index_min
+ self._available_candidates_cache = {}
+
+ try:
+ yield
+ finally:
+ Wheel.supported = original_wheel_supported
+ Wheel.support_index_min = original_support_index_min
+ self._available_candidates_cache = original_cache
+
+ def _setup_logging(self):
+ """
+        Set up pip's logger. Ensure pip's verbosity matches pip-tools' and sync
+        pip's log stream with LogContext.stream.
+        """
+        # Pip's default logger is noisy, so decrease its verbosity
+ setup_logging(
+ verbosity=log.verbosity - 1,
+ no_color=self.options.no_color,
+ user_log_file=self.options.log,
+ )
+
+ # Sync pip's console handler stream with LogContext.stream
+ logger = logging.getLogger()
+ for handler in logger.handlers:
+ if handler.name == "console": # pragma: no branch
+ handler.stream = log.stream
+ break
+ else: # pragma: no cover
+            # There is always a console handler. This warning would be a signal that
+            # this block should be removed/revisited, because pip may have
+            # refactored out its logging config.
+ log.warning("Couldn't find a 'console' logging handler")
+
+ # Sync pip's progress bars stream with LogContext.stream
+ for bar_cls in itertools.chain(*BAR_TYPES.values()):
+ bar_cls.file = log.stream
+
+
+@contextmanager
+def open_local_or_remote_file(link, session):
+ """
+ Open local or remote file for reading.
+
+ :type link: pip.index.Link
+ :type session: requests.Session
+ :raises ValueError: If link points to a local directory.
+ :return: a context manager to a FileStream with the opened file-like object
+ """
+ url = link.url_without_fragment
+
+ if link.is_file:
+ # Local URL
+ local_path = url_to_path(url)
+ if os.path.isdir(local_path):
+ raise ValueError("Cannot open directory for read: {}".format(url))
+ else:
+ st = os.stat(local_path)
+ with open(local_path, "rb") as local_file:
+ yield FileStream(stream=local_file, size=st.st_size)
+ else:
+ # Remote URL
+ headers = {"Accept-Encoding": "identity"}
+ response = session.get(url, headers=headers, stream=True)
+
+ # Content length must be int or None
+ try:
+ content_length = int(response.headers["content-length"])
+ except (ValueError, KeyError, TypeError):
+ content_length = None
+
+ try:
+ yield FileStream(stream=response.raw, size=content_length)
+ finally:
+ response.close()
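+
+# Usage sketch for the helper above, assuming `link` and `session` objects are
+# already available (see PyPIRepository._get_file_hash for the real call site):
+#
+#     with open_local_or_remote_file(link, session) as f:
+#         first_chunk = f.stream.read(4096)
+#         total_size = f.size  # may be None for chunked remote responses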
diff --git a/third_party/python/pip-tools/piptools/resolver.py b/third_party/python/pip-tools/piptools/resolver.py
new file mode 100644
index 0000000000..954f751ab9
--- /dev/null
+++ b/third_party/python/pip-tools/piptools/resolver.py
@@ -0,0 +1,408 @@
+# coding: utf-8
+from __future__ import absolute_import, division, print_function, unicode_literals
+
+import copy
+import os
+from functools import partial
+from itertools import chain, count, groupby
+
+from pip._internal.req.constructors import install_req_from_line
+
+from . import click
+from .logging import log
+from .utils import (
+ UNSAFE_PACKAGES,
+ format_requirement,
+ format_specifier,
+ is_pinned_requirement,
+ is_url_requirement,
+ key_from_ireq,
+)
+
+green = partial(click.style, fg="green")
+magenta = partial(click.style, fg="magenta")
+
+
+class RequirementSummary(object):
+ """
+ Summary of a requirement's properties for comparison purposes.
+ """
+
+ def __init__(self, ireq):
+ self.req = ireq.req
+ self.key = key_from_ireq(ireq)
+ self.extras = frozenset(ireq.extras)
+ self.specifier = ireq.specifier
+
+ def __eq__(self, other):
+ return (
+ self.key == other.key
+ and self.specifier == other.specifier
+ and self.extras == other.extras
+ )
+
+ def __hash__(self):
+ return hash((self.key, self.specifier, self.extras))
+
+ def __str__(self):
+ return repr((self.key, str(self.specifier), sorted(self.extras)))
+
+
+def combine_install_requirements(repository, ireqs):
+ """
+ Return a single install requirement that reflects a combination of
+ all the inputs.
+ """
+ # We will store the source ireqs in a _source_ireqs attribute;
+ # if any of the inputs have this, then use those sources directly.
+ source_ireqs = []
+ for ireq in ireqs:
+ source_ireqs.extend(getattr(ireq, "_source_ireqs", [ireq]))
+
+ # Optimization. Don't bother with combination logic.
+ if len(source_ireqs) == 1:
+ return source_ireqs[0]
+
+ # deepcopy the accumulator so as to not modify the inputs
+ combined_ireq = copy.deepcopy(source_ireqs[0])
+ repository.copy_ireq_dependencies(source_ireqs[0], combined_ireq)
+
+ for ireq in source_ireqs[1:]:
+ # NOTE we may be losing some info on dropped reqs here
+ combined_ireq.req.specifier &= ireq.req.specifier
+ if combined_ireq.constraint:
+ # We don't find dependencies for constraint ireqs, so copy them
+ # from non-constraints:
+ repository.copy_ireq_dependencies(ireq, combined_ireq)
+ combined_ireq.constraint &= ireq.constraint
+ # Return a sorted, de-duped tuple of extras
+ combined_ireq.extras = tuple(
+ sorted(set(tuple(combined_ireq.extras) + tuple(ireq.extras)))
+ )
+
+    # InstallRequirement objects are assumed to come from only one source, and
+ # so they support only a single comes_from entry. This function breaks this
+ # model. As a workaround, we deterministically choose a single source for
+ # the comes_from entry, and add an extra _source_ireqs attribute to keep
+ # track of multiple sources for use within pip-tools.
+ if len(source_ireqs) > 1:
+ if any(ireq.comes_from is None for ireq in source_ireqs):
+ # None indicates package was directly specified.
+ combined_ireq.comes_from = None
+ else:
+ # Populate the comes_from field from one of the sources.
+ # Requirement input order is not stable, so we need to sort:
+ # We choose the shortest entry in order to keep the printed
+ # representation as concise as possible.
+ combined_ireq.comes_from = min(
+ (ireq.comes_from for ireq in source_ireqs),
+ key=lambda x: (len(str(x)), str(x)),
+ )
+ combined_ireq._source_ireqs = source_ireqs
+ return combined_ireq
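+
+# Specifier-intersection sketch: the `&=` in the loop above combines SpecifierSets
+# the same way packaging does directly (hypothetical specifiers):
+#
+#     from packaging.specifiers import SpecifierSet
+#
+#     SpecifierSet("<1.9,>=1.4.2") & SpecifierSet("~=1.5")
+#     # -> <SpecifierSet('<1.9,>=1.4.2,~=1.5')>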
+
+
+class Resolver(object):
+ def __init__(
+ self,
+ constraints,
+ repository,
+ cache,
+ prereleases=False,
+ clear_caches=False,
+ allow_unsafe=False,
+ ):
+ """
+ This class resolves a given set of constraints (a collection of
+ InstallRequirement objects) by consulting the given Repository and the
+ DependencyCache.
+ """
+ self.our_constraints = set(constraints)
+ self.their_constraints = set()
+ self.repository = repository
+ self.dependency_cache = cache
+ self.prereleases = prereleases
+ self.clear_caches = clear_caches
+ self.allow_unsafe = allow_unsafe
+ self.unsafe_constraints = set()
+
+ @property
+ def constraints(self):
+ return set(
+ self._group_constraints(chain(self.our_constraints, self.their_constraints))
+ )
+
+ def resolve_hashes(self, ireqs):
+ """
+ Finds acceptable hashes for all of the given InstallRequirements.
+ """
+ log.debug("")
+ log.debug("Generating hashes:")
+ with self.repository.allow_all_wheels(), log.indentation():
+ return {ireq: self.repository.get_hashes(ireq) for ireq in ireqs}
+
+ def resolve(self, max_rounds=10):
+ """
+ Finds concrete package versions for all the given InstallRequirements
+ and their recursive dependencies. The end result is a flat list of
+ (name, version) tuples. (Or an editable package.)
+
+ Resolves constraints one round at a time, until they don't change
+        anymore. Protects against infinite loops by breaking out after a
+        maximum number of rounds.
+ """
+ if self.clear_caches:
+ self.dependency_cache.clear()
+ self.repository.clear_caches()
+
+ # Ignore existing packages
+ os.environ[str("PIP_EXISTS_ACTION")] = str(
+ "i"
+ ) # NOTE: str() wrapping necessary for Python 2/3 compat
+ for current_round in count(start=1): # pragma: no branch
+ if current_round > max_rounds:
+ raise RuntimeError(
+ "No stable configuration of concrete packages "
+ "could be found for the given constraints after "
+ "{max_rounds} rounds of resolving.\n"
+ "This is likely a bug.".format(max_rounds=max_rounds)
+ )
+
+ log.debug("")
+ log.debug(magenta("{:^60}".format("ROUND {}".format(current_round))))
+ has_changed, best_matches = self._resolve_one_round()
+ log.debug("-" * 60)
+ log.debug(
+ "Result of round {}: {}".format(
+ current_round, "not stable" if has_changed else "stable, done"
+ )
+ )
+ if not has_changed:
+ break
+
+ # If a package version (foo==2.0) was built in a previous round,
+ # and in this round a different version of foo needs to be built
+ # (i.e. foo==1.0), the directory will exist already, which will
+ # cause a pip build failure. The trick is to start with a new
+ # build cache dir for every round, so this can never happen.
+ self.repository.freshen_build_caches()
+
+ del os.environ["PIP_EXISTS_ACTION"]
+
+ # Only include hard requirements and not pip constraints
+ results = {req for req in best_matches if not req.constraint}
+
+ # Filter out unsafe requirements.
+ self.unsafe_constraints = set()
+ if not self.allow_unsafe:
+ # reverse_dependencies is used to filter out packages that are only
+ # required by unsafe packages. This logic is incomplete, as it would
+ # fail to filter sub-sub-dependencies of unsafe packages. None of the
+ # UNSAFE_PACKAGES currently have any dependencies at all (which makes
+ # sense for installation tools) so this seems sufficient.
+ reverse_dependencies = self.reverse_dependencies(results)
+ for req in results.copy():
+ required_by = reverse_dependencies.get(req.name.lower(), [])
+ if req.name in UNSAFE_PACKAGES or (
+ required_by and all(name in UNSAFE_PACKAGES for name in required_by)
+ ):
+ self.unsafe_constraints.add(req)
+ results.remove(req)
+
+ return results
+
+ def _group_constraints(self, constraints):
+ """
+        Groups constraints (remember, InstallRequirements!) by their key name,
+        combining their SpecifierSets into a single InstallRequirement per
+        package. For example, given the following constraints:
+
+ Django<1.9,>=1.4.2
+ django~=1.5
+ Flask~=0.7
+
+ This will be combined into a single entry per package:
+
+ django~=1.5,<1.9,>=1.4.2
+ flask~=0.7
+
+ """
+ constraints = list(constraints)
+ for ireq in constraints:
+ if ireq.name is None:
+            # get_dependencies has the side effect of assigning a name to the ireq
+ # (so we can group by the name below).
+ self.repository.get_dependencies(ireq)
+
+ # Sort first by name, i.e. the groupby key. Then within each group,
+ # sort editables first.
+ # This way, we don't bother with combining editables, since the first
+ # ireq will be editable, if one exists.
+ for _, ireqs in groupby(
+ sorted(constraints, key=(lambda x: (key_from_ireq(x), not x.editable))),
+ key=key_from_ireq,
+ ):
+ yield combine_install_requirements(self.repository, ireqs)
+
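+    # A grouping sketch mirroring the sort+groupby above, with plain strings
+    # standing in for InstallRequirements (hypothetical names):
+    #
+    #     from itertools import groupby
+    #
+    #     names = ["Django", "django", "Flask"]
+    #     normalize = lambda name: name.replace("_", "-").lower()
+    #     for key, group in groupby(sorted(names, key=normalize), key=normalize):
+    #         print(key, list(group))
+    #     # django ['Django', 'django']
+    #     # flask ['Flask']
+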
+ def _resolve_one_round(self):
+ """
+ Resolves one level of the current constraints, by finding the best
+ match for each package in the repository and adding all requirements
+ for those best package versions. Some of these constraints may be new
+ or updated.
+
+ Returns whether new constraints appeared in this round. If no
+ constraints were added or changed, this indicates a stable
+ configuration.
+ """
+ # Sort this list for readability of terminal output
+ constraints = sorted(self.constraints, key=key_from_ireq)
+
+ log.debug("Current constraints:")
+ with log.indentation():
+ for constraint in constraints:
+ log.debug(str(constraint))
+
+ log.debug("")
+ log.debug("Finding the best candidates:")
+ with log.indentation():
+ best_matches = {self.get_best_match(ireq) for ireq in constraints}
+
+ # Find the new set of secondary dependencies
+ log.debug("")
+ log.debug("Finding secondary dependencies:")
+
+ their_constraints = []
+ with log.indentation():
+ for best_match in best_matches:
+ their_constraints.extend(self._iter_dependencies(best_match))
+ # Grouping constraints to make clean diff between rounds
+ theirs = set(self._group_constraints(their_constraints))
+
+ # NOTE: We need to compare RequirementSummary objects, since
+ # InstallRequirement does not define equality
+ diff = {RequirementSummary(t) for t in theirs} - {
+ RequirementSummary(t) for t in self.their_constraints
+ }
+ removed = {RequirementSummary(t) for t in self.their_constraints} - {
+ RequirementSummary(t) for t in theirs
+ }
+
+ has_changed = len(diff) > 0 or len(removed) > 0
+ if has_changed:
+ log.debug("")
+ log.debug("New dependencies found in this round:")
+ with log.indentation():
+ for new_dependency in sorted(diff, key=key_from_ireq):
+ log.debug("adding {}".format(new_dependency))
+ log.debug("Removed dependencies in this round:")
+ with log.indentation():
+ for removed_dependency in sorted(removed, key=key_from_ireq):
+ log.debug("removing {}".format(removed_dependency))
+
+        # Store the last round's results in their_constraints
+ self.their_constraints = theirs
+ return has_changed, best_matches
+
+ def get_best_match(self, ireq):
+ """
+        Returns a (pinned or editable) InstallRequirement that indicates the
+        best match to use for the given InstallRequirement.
+
+ Example:
+ Given the constraint Flask>=0.10, may return Flask==0.10.1 at
+ a certain moment in time.
+
+ Pinned requirements will always return themselves, i.e.
+
+ Flask==0.10.1 => Flask==0.10.1
+
+ """
+ if ireq.editable or is_url_requirement(ireq):
+ # NOTE: it's much quicker to immediately return instead of
+ # hitting the index server
+ best_match = ireq
+ elif is_pinned_requirement(ireq):
+ # NOTE: it's much quicker to immediately return instead of
+ # hitting the index server
+ best_match = ireq
+ elif ireq.constraint:
+ # NOTE: This is not a requirement (yet) and does not need
+ # to be resolved
+ best_match = ireq
+ else:
+ best_match = self.repository.find_best_match(
+ ireq, prereleases=self.prereleases
+ )
+
+ # Format the best match
+ log.debug(
+ "found candidate {} (constraint was {})".format(
+ format_requirement(best_match), format_specifier(ireq)
+ )
+ )
+ best_match.comes_from = ireq.comes_from
+ if hasattr(ireq, "_source_ireqs"):
+ best_match._source_ireqs = ireq._source_ireqs
+ return best_match
+
+ def _iter_dependencies(self, ireq):
+ """
+        Given a pinned, URL, or editable InstallRequirement, collects all of its
+        secondary dependencies, either by looking them up in a local cache, or
+        by reaching out to the repository.
+
+ Editable requirements will never be looked up, as they may have
+ changed at any time.
+ """
+ # Pip does not resolve dependencies of constraints. We skip handling
+ # constraints here as well to prevent the cache from being polluted.
+ # Constraints that are later determined to be dependencies will be
+ # marked as non-constraints in later rounds by
+ # `combine_install_requirements`, and will be properly resolved.
+ # See https://github.com/pypa/pip/
+ # blob/6896dfcd831330c13e076a74624d95fa55ff53f4/src/pip/_internal/
+ # legacy_resolve.py#L325
+ if ireq.constraint:
+ return
+
+ if ireq.editable or is_url_requirement(ireq):
+ for dependency in self.repository.get_dependencies(ireq):
+ yield dependency
+ return
+ elif not is_pinned_requirement(ireq):
+ raise TypeError(
+ "Expected pinned or editable requirement, got {}".format(ireq)
+ )
+
+ # Now, either get the dependencies from the dependency cache (for
+ # speed), or reach out to the external repository to
+ # download and inspect the package version and get dependencies
+ # from there
+ if ireq not in self.dependency_cache:
+ log.debug(
+ "{} not in cache, need to check index".format(format_requirement(ireq)),
+ fg="yellow",
+ )
+ dependencies = self.repository.get_dependencies(ireq)
+ self.dependency_cache[ireq] = sorted(str(ireq.req) for ireq in dependencies)
+
+ # Example: ['Werkzeug>=0.9', 'Jinja2>=2.4']
+ dependency_strings = self.dependency_cache[ireq]
+ log.debug(
+ "{:25} requires {}".format(
+ format_requirement(ireq),
+ ", ".join(sorted(dependency_strings, key=lambda s: s.lower())) or "-",
+ )
+ )
+ for dependency_string in dependency_strings:
+ yield install_req_from_line(
+ dependency_string, constraint=ireq.constraint, comes_from=ireq
+ )
+
+ def reverse_dependencies(self, ireqs):
+ non_editable = [
+ ireq for ireq in ireqs if not (ireq.editable or is_url_requirement(ireq))
+ ]
+ return self.dependency_cache.reverse_dependencies(non_editable)
diff --git a/third_party/python/pip-tools/piptools/scripts/__init__.py b/third_party/python/pip-tools/piptools/scripts/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/third_party/python/pip-tools/piptools/scripts/__init__.py
diff --git a/third_party/python/pip-tools/piptools/scripts/compile.py b/third_party/python/pip-tools/piptools/scripts/compile.py
new file mode 100755
index 0000000000..785afb9bb7
--- /dev/null
+++ b/third_party/python/pip-tools/piptools/scripts/compile.py
@@ -0,0 +1,501 @@
+# coding: utf-8
+from __future__ import absolute_import, division, print_function, unicode_literals
+
+import os
+import shlex
+import sys
+import tempfile
+import warnings
+
+from click import Command
+from click.utils import safecall
+from pip._internal.commands import create_command
+from pip._internal.req.constructors import install_req_from_line
+from pip._internal.utils.misc import redact_auth_from_url
+
+from .. import click
+from .._compat import parse_requirements
+from ..cache import DependencyCache
+from ..exceptions import PipToolsError
+from ..locations import CACHE_DIR
+from ..logging import log
+from ..repositories import LocalRequirementsRepository, PyPIRepository
+from ..resolver import Resolver
+from ..utils import UNSAFE_PACKAGES, dedup, is_pinned_requirement, key_from_ireq
+from ..writer import OutputWriter
+
+DEFAULT_REQUIREMENTS_FILE = "requirements.in"
+DEFAULT_REQUIREMENTS_OUTPUT_FILE = "requirements.txt"
+
+
+def _get_default_option(option_name):
+ """
+    Get the default value of a pip option (including options from pip.conf)
+    by a given option name.
+ """
+ install_command = create_command("install")
+ default_values = install_command.parser.get_default_values()
+ return getattr(default_values, option_name)
+
+
+class BaseCommand(Command):
+ _os_args = None
+
+ def parse_args(self, ctx, args):
+ """
+ Override base `parse_args` to store the argument part of `sys.argv`.
+ """
+ self._os_args = set(args)
+ return super(BaseCommand, self).parse_args(ctx, args)
+
+ def has_arg(self, arg_name):
+ """
+ Detect whether a given arg name (including negative counterparts
+ to the arg, e.g. --no-arg) is present in the argument part of `sys.argv`.
+ """
+ command_options = {option.name: option for option in self.params}
+ option = command_options[arg_name]
+ args = set(option.opts + option.secondary_opts)
+ return bool(self._os_args & args)
+
+
+@click.command(
+ cls=BaseCommand, context_settings={"help_option_names": ("-h", "--help")}
+)
+@click.version_option()
+@click.pass_context
+@click.option("-v", "--verbose", count=True, help="Show more output")
+@click.option("-q", "--quiet", count=True, help="Give less output")
+@click.option(
+ "-n",
+ "--dry-run",
+ is_flag=True,
+ help="Only show what would happen, don't change anything",
+)
+@click.option(
+ "-p",
+ "--pre",
+ is_flag=True,
+ default=None,
+ help="Allow resolving to prereleases (default is not)",
+)
+@click.option(
+ "-r",
+ "--rebuild",
+ is_flag=True,
+ help="Clear any caches upfront, rebuild from scratch",
+)
+@click.option(
+ "-f",
+ "--find-links",
+ multiple=True,
+ help="Look for archives in this directory or on this HTML page",
+ envvar="PIP_FIND_LINKS",
+)
+@click.option(
+ "-i",
+ "--index-url",
+ help="Change index URL (defaults to {index_url})".format(
+ index_url=redact_auth_from_url(_get_default_option("index_url"))
+ ),
+ envvar="PIP_INDEX_URL",
+)
+@click.option(
+ "--extra-index-url",
+ multiple=True,
+ help="Add additional index URL to search",
+ envvar="PIP_EXTRA_INDEX_URL",
+)
+@click.option("--cert", help="Path to alternate CA bundle.")
+@click.option(
+ "--client-cert",
+ help="Path to SSL client certificate, a single file containing "
+ "the private key and the certificate in PEM format.",
+)
+@click.option(
+ "--trusted-host",
+ multiple=True,
+ envvar="PIP_TRUSTED_HOST",
+ help="Mark this host as trusted, even though it does not have "
+ "valid or any HTTPS.",
+)
+@click.option(
+ "--header/--no-header",
+ is_flag=True,
+ default=True,
+ help="Add header to generated file",
+)
+@click.option(
+ "--index/--no-index",
+ is_flag=True,
+ default=True,
+ help="DEPRECATED: Add index URL to generated file",
+)
+@click.option(
+ "--emit-trusted-host/--no-emit-trusted-host",
+ is_flag=True,
+ default=True,
+ help="Add trusted host option to generated file",
+)
+@click.option(
+ "--annotate/--no-annotate",
+ is_flag=True,
+ default=True,
+ help="Annotate results, indicating where dependencies come from",
+)
+@click.option(
+ "-U",
+ "--upgrade",
+ is_flag=True,
+ default=False,
+ help="Try to upgrade all dependencies to their latest versions",
+)
+@click.option(
+ "-P",
+ "--upgrade-package",
+ "upgrade_packages",
+ nargs=1,
+ multiple=True,
+ help="Specify particular packages to upgrade.",
+)
+@click.option(
+ "-o",
+ "--output-file",
+ nargs=1,
+ default=None,
+ type=click.File("w+b", atomic=True, lazy=True),
+ help=(
+ "Output file name. Required if more than one input file is given. "
+ "Will be derived from input file otherwise."
+ ),
+)
+@click.option(
+ "--allow-unsafe",
+ is_flag=True,
+ default=False,
+ help="Pin packages considered unsafe: {}".format(
+ ", ".join(sorted(UNSAFE_PACKAGES))
+ ),
+)
+@click.option(
+ "--generate-hashes",
+ is_flag=True,
+ default=False,
+ help="Generate pip 8 style hashes in the resulting requirements file.",
+)
+@click.option(
+ "--reuse-hashes/--no-reuse-hashes",
+ is_flag=True,
+ default=True,
+ help=(
+ "Improve the speed of --generate-hashes by reusing the hashes from an "
+ "existing output file."
+ ),
+)
+@click.option(
+ "--max-rounds",
+ default=10,
+ help="Maximum number of rounds before resolving the requirements aborts.",
+)
+@click.argument("src_files", nargs=-1, type=click.Path(exists=True, allow_dash=True))
+@click.option(
+ "--build-isolation/--no-build-isolation",
+ is_flag=True,
+ default=True,
+ help="Enable isolation when building a modern source distribution. "
+ "Build dependencies specified by PEP 518 must be already installed "
+ "if build isolation is disabled.",
+)
+@click.option(
+ "--emit-find-links/--no-emit-find-links",
+ is_flag=True,
+ default=True,
+ help="Add the find-links option to generated file",
+)
+@click.option(
+ "--cache-dir",
+ help="Store the cache data in DIRECTORY.",
+ default=CACHE_DIR,
+ envvar="PIP_TOOLS_CACHE_DIR",
+ show_default=True,
+ show_envvar=True,
+ type=click.Path(file_okay=False, writable=True),
+)
+@click.option("--pip-args", help="Arguments to pass directly to the pip command.")
+@click.option(
+ "--emit-index-url/--no-emit-index-url",
+ is_flag=True,
+ default=True,
+ help="Add index URL to generated file",
+)
+def cli(
+ ctx,
+ verbose,
+ quiet,
+ dry_run,
+ pre,
+ rebuild,
+ find_links,
+ index_url,
+ extra_index_url,
+ cert,
+ client_cert,
+ trusted_host,
+ header,
+ index,
+ emit_trusted_host,
+ annotate,
+ upgrade,
+ upgrade_packages,
+ output_file,
+ allow_unsafe,
+ generate_hashes,
+ reuse_hashes,
+ src_files,
+ max_rounds,
+ build_isolation,
+ emit_find_links,
+ cache_dir,
+ pip_args,
+ emit_index_url,
+):
+ """Compiles requirements.txt from requirements.in specs."""
+ log.verbosity = verbose - quiet
+
+ if len(src_files) == 0:
+ if os.path.exists(DEFAULT_REQUIREMENTS_FILE):
+ src_files = (DEFAULT_REQUIREMENTS_FILE,)
+ elif os.path.exists("setup.py"):
+ src_files = ("setup.py",)
+ else:
+ raise click.BadParameter(
+ (
+ "If you do not specify an input file, "
+ "the default is {} or setup.py"
+ ).format(DEFAULT_REQUIREMENTS_FILE)
+ )
+
+ if not output_file:
+ # An output file must be provided for stdin
+ if src_files == ("-",):
+ raise click.BadParameter("--output-file is required if input is from stdin")
+        # Use the default requirements output file if setup.py is the source file
+ elif src_files == ("setup.py",):
+ file_name = DEFAULT_REQUIREMENTS_OUTPUT_FILE
+ # An output file must be provided if there are multiple source files
+ elif len(src_files) > 1:
+ raise click.BadParameter(
+ "--output-file is required if two or more input files are given."
+ )
+ # Otherwise derive the output file from the source file
+ else:
+ base_name = src_files[0].rsplit(".", 1)[0]
+ file_name = base_name + ".txt"
+
+ output_file = click.open_file(file_name, "w+b", atomic=True, lazy=True)
+
+ # Close the file at the end of the context execution
+ ctx.call_on_close(safecall(output_file.close_intelligently))
+
+ if cli.has_arg("index") and cli.has_arg("emit_index_url"):
+ raise click.BadParameter(
+ "--index/--no-index and --emit-index-url/--no-emit-index-url "
+ "are mutually exclusive."
+ )
+ elif cli.has_arg("index"):
+ warnings.warn(
+ "--index and --no-index are deprecated and will be removed "
+ "in future versions. Use --emit-index-url/--no-emit-index-url instead.",
+ category=FutureWarning,
+ )
+ emit_index_url = index
+
+ ###
+ # Setup
+ ###
+
+ right_args = shlex.split(pip_args or "")
+ pip_args = []
+ if find_links:
+ for link in find_links:
+ pip_args.extend(["-f", link])
+ if index_url:
+ pip_args.extend(["-i", index_url])
+ if extra_index_url:
+ for extra_index in extra_index_url:
+ pip_args.extend(["--extra-index-url", extra_index])
+ if cert:
+ pip_args.extend(["--cert", cert])
+ if client_cert:
+ pip_args.extend(["--client-cert", client_cert])
+ if pre:
+ pip_args.extend(["--pre"])
+ if trusted_host:
+ for host in trusted_host:
+ pip_args.extend(["--trusted-host", host])
+
+ if not build_isolation:
+ pip_args.append("--no-build-isolation")
+ pip_args.extend(right_args)
+
+ repository = PyPIRepository(pip_args, cache_dir=cache_dir)
+
+ # Parse all constraints coming from --upgrade-package/-P
+ upgrade_reqs_gen = (install_req_from_line(pkg) for pkg in upgrade_packages)
+ upgrade_install_reqs = {
+ key_from_ireq(install_req): install_req for install_req in upgrade_reqs_gen
+ }
+
+ existing_pins_to_upgrade = set()
+
+ # Proxy with a LocalRequirementsRepository if --upgrade is not specified
+ # (= default invocation)
+ if not upgrade and os.path.exists(output_file.name):
+        # Use a temporary repository to ensure outdated (removed) options from
+        # the existing requirements.txt don't get into the current repository.
+ tmp_repository = PyPIRepository(pip_args, cache_dir=cache_dir)
+ ireqs = parse_requirements(
+ output_file.name,
+ finder=tmp_repository.finder,
+ session=tmp_repository.session,
+ options=tmp_repository.options,
+ )
+
+ # Exclude packages from --upgrade-package/-P from the existing
+ # constraints, and separately gather pins to be upgraded
+ existing_pins = {}
+ for ireq in filter(is_pinned_requirement, ireqs):
+ key = key_from_ireq(ireq)
+ if key in upgrade_install_reqs:
+ existing_pins_to_upgrade.add(key)
+ else:
+ existing_pins[key] = ireq
+ repository = LocalRequirementsRepository(
+ existing_pins, repository, reuse_hashes=reuse_hashes
+ )
+
+ ###
+ # Parsing/collecting initial requirements
+ ###
+
+ constraints = []
+ for src_file in src_files:
+ is_setup_file = os.path.basename(src_file) == "setup.py"
+ if is_setup_file or src_file == "-":
+ # pip requires filenames and not files. Since we want to support
+ # piping from stdin, we need to briefly save the input from stdin
+            # to a temporary file and have pip read that. This is also used for
+            # reading requirements from install_requires in setup.py.
+ tmpfile = tempfile.NamedTemporaryFile(mode="wt", delete=False)
+ if is_setup_file:
+ from distutils.core import run_setup
+
+ dist = run_setup(src_file)
+ tmpfile.write("\n".join(dist.install_requires))
+ comes_from = "{name} ({filename})".format(
+ name=dist.get_name(), filename=src_file
+ )
+ else:
+ tmpfile.write(sys.stdin.read())
+ comes_from = "-r -"
+ tmpfile.flush()
+ reqs = list(
+ parse_requirements(
+ tmpfile.name,
+ finder=repository.finder,
+ session=repository.session,
+ options=repository.options,
+ )
+ )
+ for req in reqs:
+ req.comes_from = comes_from
+ constraints.extend(reqs)
+ else:
+ constraints.extend(
+ parse_requirements(
+ src_file,
+ finder=repository.finder,
+ session=repository.session,
+ options=repository.options,
+ )
+ )
+
+ primary_packages = {
+ key_from_ireq(ireq) for ireq in constraints if not ireq.constraint
+ }
+
+ allowed_upgrades = primary_packages | existing_pins_to_upgrade
+ constraints.extend(
+ ireq for key, ireq in upgrade_install_reqs.items() if key in allowed_upgrades
+ )
+
+ # Filter out pip environment markers which do not match (PEP496)
+ constraints = [
+ req for req in constraints if req.markers is None or req.markers.evaluate()
+ ]
+
+ log.debug("Using indexes:")
+ with log.indentation():
+ for index_url in dedup(repository.finder.index_urls):
+ log.debug(redact_auth_from_url(index_url))
+
+ if repository.finder.find_links:
+ log.debug("")
+ log.debug("Using links:")
+ with log.indentation():
+ for find_link in dedup(repository.finder.find_links):
+ log.debug(redact_auth_from_url(find_link))
+
+ try:
+ resolver = Resolver(
+ constraints,
+ repository,
+ prereleases=repository.finder.allow_all_prereleases or pre,
+ cache=DependencyCache(cache_dir),
+ clear_caches=rebuild,
+ allow_unsafe=allow_unsafe,
+ )
+ results = resolver.resolve(max_rounds=max_rounds)
+ if generate_hashes:
+ hashes = resolver.resolve_hashes(results)
+ else:
+ hashes = None
+ except PipToolsError as e:
+ log.error(str(e))
+ sys.exit(2)
+
+ log.debug("")
+
+ ##
+ # Output
+ ##
+
+ writer = OutputWriter(
+ src_files,
+ output_file,
+ click_ctx=ctx,
+ dry_run=dry_run,
+ emit_header=header,
+ emit_index_url=emit_index_url,
+ emit_trusted_host=emit_trusted_host,
+ annotate=annotate,
+ generate_hashes=generate_hashes,
+ default_index_url=repository.DEFAULT_INDEX_URL,
+ index_urls=repository.finder.index_urls,
+ trusted_hosts=repository.finder.trusted_hosts,
+ format_control=repository.finder.format_control,
+ allow_unsafe=allow_unsafe,
+ find_links=repository.finder.find_links,
+ emit_find_links=emit_find_links,
+ )
+ writer.write(
+ results=results,
+ unsafe_requirements=resolver.unsafe_constraints,
+ markers={
+ key_from_ireq(ireq): ireq.markers for ireq in constraints if ireq.markers
+ },
+ hashes=hashes,
+ )
+
+ if dry_run:
+ log.info("Dry-run, so nothing updated.")
diff --git a/third_party/python/pip-tools/piptools/scripts/sync.py b/third_party/python/pip-tools/piptools/scripts/sync.py
new file mode 100755
index 0000000000..fbad5463cd
--- /dev/null
+++ b/third_party/python/pip-tools/piptools/scripts/sync.py
@@ -0,0 +1,217 @@
+# coding: utf-8
+from __future__ import absolute_import, division, print_function, unicode_literals
+
+import itertools
+import os
+import shlex
+import sys
+
+from pip._internal.commands import create_command
+from pip._internal.utils.misc import get_installed_distributions
+
+from .. import click, sync
+from .._compat import parse_requirements
+from ..exceptions import PipToolsError
+from ..logging import log
+from ..repositories import PyPIRepository
+from ..utils import flat_map
+
+DEFAULT_REQUIREMENTS_FILE = "requirements.txt"
+
+
+@click.command(context_settings={"help_option_names": ("-h", "--help")})
+@click.version_option()
+@click.option(
+ "-a",
+ "--ask",
+ is_flag=True,
+ help="Show what would happen, then ask whether to continue",
+)
+@click.option(
+ "-n",
+ "--dry-run",
+ is_flag=True,
+ help="Only show what would happen, don't change anything",
+)
+@click.option("--force", is_flag=True, help="Proceed even if conflicts are found")
+@click.option(
+ "-f",
+ "--find-links",
+ multiple=True,
+ help="Look for archives in this directory or on this HTML page",
+ envvar="PIP_FIND_LINKS",
+)
+@click.option(
+ "-i",
+ "--index-url",
+ help="Change index URL (defaults to PyPI)",
+ envvar="PIP_INDEX_URL",
+)
+@click.option(
+ "--extra-index-url",
+ multiple=True,
+ help="Add additional index URL to search",
+ envvar="PIP_EXTRA_INDEX_URL",
+)
+@click.option(
+ "--trusted-host",
+ multiple=True,
+ help="Mark this host as trusted, even though it does not have valid or any HTTPS.",
+)
+@click.option(
+ "--no-index",
+ is_flag=True,
+ help="Ignore package index (only looking at --find-links URLs instead)",
+)
+@click.option("-q", "--quiet", default=False, is_flag=True, help="Give less output")
+@click.option(
+ "--user", "user_only", is_flag=True, help="Restrict attention to user directory"
+)
+@click.option("--cert", help="Path to alternate CA bundle.")
+@click.option(
+ "--client-cert",
+ help="Path to SSL client certificate, a single file containing "
+ "the private key and the certificate in PEM format.",
+)
+@click.argument("src_files", required=False, type=click.Path(exists=True), nargs=-1)
+@click.option("--pip-args", help="Arguments to pass directly to pip install.")
+def cli(
+ ask,
+ dry_run,
+ force,
+ find_links,
+ index_url,
+ extra_index_url,
+ trusted_host,
+ no_index,
+ quiet,
+ user_only,
+ cert,
+ client_cert,
+ src_files,
+ pip_args,
+):
+ """Synchronize virtual environment with requirements.txt."""
+ if not src_files:
+ if os.path.exists(DEFAULT_REQUIREMENTS_FILE):
+ src_files = (DEFAULT_REQUIREMENTS_FILE,)
+ else:
+ msg = "No requirement files given and no {} found in the current directory"
+ log.error(msg.format(DEFAULT_REQUIREMENTS_FILE))
+ sys.exit(2)
+
+ if any(src_file.endswith(".in") for src_file in src_files):
+ msg = (
+ "Some input files have the .in extension, which is most likely an error "
+ "and can cause weird behaviour. You probably meant to use "
+ "the corresponding *.txt file?"
+ )
+ if force:
+ log.warning("WARNING: " + msg)
+ else:
+ log.error("ERROR: " + msg)
+ sys.exit(2)
+
+ install_command = create_command("install")
+ options, _ = install_command.parse_args([])
+ session = install_command._build_session(options)
+ finder = install_command._build_package_finder(options=options, session=session)
+
+ # Parse requirements file. Note, all options inside requirements file
+ # will be collected by the finder.
+ requirements = flat_map(
+ lambda src: parse_requirements(src, finder=finder, session=session), src_files
+ )
+
+ try:
+ requirements = sync.merge(requirements, ignore_conflicts=force)
+ except PipToolsError as e:
+ log.error(str(e))
+ sys.exit(2)
+
+ installed_dists = get_installed_distributions(skip=[], user_only=user_only)
+ to_install, to_uninstall = sync.diff(requirements, installed_dists)
+
+ install_flags = _compose_install_flags(
+ finder,
+ no_index=no_index,
+ index_url=index_url,
+ extra_index_url=extra_index_url,
+ trusted_host=trusted_host,
+ find_links=find_links,
+ user_only=user_only,
+ cert=cert,
+ client_cert=client_cert,
+ ) + shlex.split(pip_args or "")
+ sys.exit(
+ sync.sync(
+ to_install,
+ to_uninstall,
+ verbose=(not quiet),
+ dry_run=dry_run,
+ install_flags=install_flags,
+ ask=ask,
+ )
+ )
+
+
+def _compose_install_flags(
+ finder,
+ no_index=False,
+ index_url=None,
+ extra_index_url=None,
+ trusted_host=None,
+ find_links=None,
+ user_only=False,
+ cert=None,
+ client_cert=None,
+):
+ """
+ Compose install flags with the given finder and CLI options.
+ """
+ result = []
+
+ # Build --index-url/--extra-index-url/--no-index
+ if no_index:
+ result.append("--no-index")
+ elif index_url:
+ result.extend(["--index-url", index_url])
+ elif finder.index_urls:
+ finder_index_url = finder.index_urls[0]
+ if finder_index_url != PyPIRepository.DEFAULT_INDEX_URL:
+ result.extend(["--index-url", finder_index_url])
+ for extra_index in finder.index_urls[1:]:
+ result.extend(["--extra-index-url", extra_index])
+ else:
+ result.append("--no-index")
+
+ for extra_index in extra_index_url or []:
+ result.extend(["--extra-index-url", extra_index])
+
+ # Build --trusted-hosts
+ for host in itertools.chain(trusted_host or [], finder.trusted_hosts):
+ result.extend(["--trusted-host", host])
+
+ # Build --find-links
+ for link in itertools.chain(find_links or [], finder.find_links):
+ result.extend(["--find-links", link])
+
+ # Build format controls --no-binary/--only-binary
+ for format_control in ("no_binary", "only_binary"):
+ formats = getattr(finder.format_control, format_control)
+ if not formats:
+ continue
+ result.extend(
+ ["--" + format_control.replace("_", "-"), ",".join(sorted(formats))]
+ )
+
+ if user_only:
+ result.append("--user")
+
+ if cert:
+ result.extend(["--cert", cert])
+
+ if client_cert:
+ result.extend(["--client-cert", client_cert])
+
+ return result
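+
+# Result sketch for the helper above, assuming a finder with no trusted hosts,
+# find-links, or format controls of its own (hypothetical index URL):
+#
+#     _compose_install_flags(
+#         finder,
+#         index_url="https://pypi.example.org/simple",
+#         trusted_host=["pypi.example.org"],
+#     )
+#     # -> ["--index-url", "https://pypi.example.org/simple",
+#     #     "--trusted-host", "pypi.example.org"]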
diff --git a/third_party/python/pip-tools/piptools/sync.py b/third_party/python/pip-tools/piptools/sync.py
new file mode 100644
index 0000000000..4e2bb49401
--- /dev/null
+++ b/third_party/python/pip-tools/piptools/sync.py
@@ -0,0 +1,223 @@
+import collections
+import os
+import sys
+import tempfile
+from subprocess import check_call # nosec
+
+from pip._internal.commands.freeze import DEV_PKGS
+from pip._internal.utils.compat import stdlib_pkgs
+
+from . import click
+from .exceptions import IncompatibleRequirements
+from .utils import (
+ flat_map,
+ format_requirement,
+ get_hashes_from_ireq,
+ is_url_requirement,
+ key_from_ireq,
+ key_from_req,
+)
+
+PACKAGES_TO_IGNORE = (
+ ["-markerlib", "pip", "pip-tools", "pip-review", "pkg-resources"]
+ + list(stdlib_pkgs)
+ + list(DEV_PKGS)
+)
+
+
+def dependency_tree(installed_keys, root_key):
+ """
+    Calculate the dependency tree for the package `root_key` and return
+    a collection of all its dependencies. Uses a queue-based breadth-first
+    traversal.
+
+ `installed_keys` should be a {key: requirement} mapping, e.g.
+ {'django': from_line('django==1.8')}
+ `root_key` should be the key to return the dependency tree for.
+ """
+ dependencies = set()
+ queue = collections.deque()
+
+ if root_key in installed_keys:
+ dep = installed_keys[root_key]
+ queue.append(dep)
+
+ while queue:
+ v = queue.popleft()
+ key = key_from_req(v)
+ if key in dependencies:
+ continue
+
+ dependencies.add(key)
+
+ for dep_specifier in v.requires():
+ dep_name = key_from_req(dep_specifier)
+ if dep_name in installed_keys:
+ dep = installed_keys[dep_name]
+
+ if dep_specifier.specifier.contains(dep.version):
+ queue.append(dep)
+
+ return dependencies
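+
+# A traversal sketch over a plain name -> dependencies mapping (hypothetical data),
+# mirroring the queue-based walk above without the specifier checks:
+#
+#     import collections
+#
+#     deps_map = {"django": ["sqlparse", "asgiref"], "sqlparse": [], "asgiref": []}
+#     seen, queue = set(), collections.deque(["django"])
+#     while queue:
+#         name = queue.popleft()
+#         if name in seen:
+#             continue
+#         seen.add(name)
+#         queue.extend(deps_map[name])
+#     sorted(seen)  # -> ['asgiref', 'django', 'sqlparse']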
+
+
+def get_dists_to_ignore(installed):
+ """
+ Returns a collection of package names to ignore when performing pip-sync,
+ based on the currently installed environment. For example, when pip-tools
+ is installed in the local environment, it should be ignored, including all
+ of its dependencies (e.g. click). When pip-tools is not installed
+ locally, click should also be installed/uninstalled depending on the given
+ requirements.
+ """
+ installed_keys = {key_from_req(r): r for r in installed}
+ return list(
+ flat_map(lambda req: dependency_tree(installed_keys, req), PACKAGES_TO_IGNORE)
+ )
+
+
+def merge(requirements, ignore_conflicts):
+ by_key = {}
+
+ for ireq in requirements:
+ # Limitation: URL requirements are merged by precise string match, so
+ # "file:///example.zip#egg=example", "file:///example.zip", and
+ # "example==1.0" will not merge with each other
+ if ireq.match_markers():
+ key = key_from_ireq(ireq)
+
+ if not ignore_conflicts:
+ existing_ireq = by_key.get(key)
+ if existing_ireq:
+ # NOTE: We check equality here since we can assume that the
+ # requirements are all pinned
+ if ireq.specifier != existing_ireq.specifier:
+ raise IncompatibleRequirements(ireq, existing_ireq)
+
+ # TODO: Always pick the largest specifier in case of a conflict
+ by_key[key] = ireq
+ return by_key.values()
+
+
+def diff_key_from_ireq(ireq):
+ """
+ Calculate a key for comparing a compiled requirement with installed modules.
+ For URL requirements, only provide a useful key if the url includes
+ #egg=name==version, which will set ireq.req.name and ireq.specifier.
+ Otherwise return ireq.link so the key will not match and the package will
+ reinstall. Reinstall is necessary to ensure that packages will reinstall
+ if the URL is changed but the version is not.
+ """
+ if is_url_requirement(ireq):
+ if (
+ ireq.req
+ and (getattr(ireq.req, "key", None) or getattr(ireq.req, "name", None))
+ and ireq.specifier
+ ):
+ return key_from_ireq(ireq)
+ return str(ireq.link)
+ return key_from_ireq(ireq)
+
+
+def diff(compiled_requirements, installed_dists):
+ """
+ Calculate which packages should be installed or uninstalled, given a set
+ of compiled requirements and a list of currently installed modules.
+ """
+ requirements_lut = {diff_key_from_ireq(r): r for r in compiled_requirements}
+
+ satisfied = set() # holds keys
+ to_install = set() # holds InstallRequirement objects
+ to_uninstall = set() # holds keys
+
+ pkgs_to_ignore = get_dists_to_ignore(installed_dists)
+ for dist in installed_dists:
+ key = key_from_req(dist)
+ if key not in requirements_lut or not requirements_lut[key].match_markers():
+ to_uninstall.add(key)
+ elif requirements_lut[key].specifier.contains(dist.version):
+ satisfied.add(key)
+
+ for key, requirement in requirements_lut.items():
+ if key not in satisfied and requirement.match_markers():
+ to_install.add(requirement)
+
+ # Make sure to not uninstall any packages that should be ignored
+ to_uninstall -= set(pkgs_to_ignore)
+
+ return (to_install, to_uninstall)
+
+
+def sync(
+ to_install,
+ to_uninstall,
+ verbose=False,
+ dry_run=False,
+ install_flags=None,
+ ask=False,
+):
+ """
+    Install and uninstall the given sets of modules.
+ """
+ exit_code = 0
+
+ if not to_uninstall and not to_install:
+ if verbose:
+ click.echo("Everything up-to-date")
+ return exit_code
+
+ pip_flags = []
+ if not verbose:
+ pip_flags += ["-q"]
+
+ if ask:
+ dry_run = True
+
+ if dry_run:
+ if to_uninstall:
+ click.echo("Would uninstall:")
+ for pkg in sorted(to_uninstall):
+ click.echo(" {}".format(pkg))
+
+ if to_install:
+ click.echo("Would install:")
+ for ireq in sorted(to_install, key=key_from_ireq):
+ click.echo(" {}".format(format_requirement(ireq)))
+
+ exit_code = 1
+
+ if ask and click.confirm("Would you like to proceed with these changes?"):
+ dry_run = False
+ exit_code = 0
+
+ if not dry_run:
+ if to_uninstall:
+ check_call( # nosec
+ [sys.executable, "-m", "pip", "uninstall", "-y"]
+ + pip_flags
+ + sorted(to_uninstall)
+ )
+
+ if to_install:
+ if install_flags is None:
+ install_flags = []
+ # prepare requirement lines
+ req_lines = []
+ for ireq in sorted(to_install, key=key_from_ireq):
+ ireq_hashes = get_hashes_from_ireq(ireq)
+ req_lines.append(format_requirement(ireq, hashes=ireq_hashes))
+
+ # save requirement lines to a temporary file
+ tmp_req_file = tempfile.NamedTemporaryFile(mode="wt", delete=False)
+ tmp_req_file.write("\n".join(req_lines))
+ tmp_req_file.close()
+
+ try:
+ check_call( # nosec
+ [sys.executable, "-m", "pip", "install", "-r", tmp_req_file.name]
+ + pip_flags
+ + install_flags
+ )
+ finally:
+ os.unlink(tmp_req_file.name)
+
+ return exit_code
diff --git a/third_party/python/pip-tools/piptools/utils.py b/third_party/python/pip-tools/piptools/utils.py
new file mode 100644
index 0000000000..b0eca76a6e
--- /dev/null
+++ b/third_party/python/pip-tools/piptools/utils.py
@@ -0,0 +1,388 @@
+# coding: utf-8
+from __future__ import absolute_import, division, print_function, unicode_literals
+
+import sys
+from collections import OrderedDict
+from itertools import chain
+
+import six
+from click.utils import LazyFile
+from pip._internal.req.constructors import install_req_from_line
+from pip._internal.utils.misc import redact_auth_from_url
+from pip._internal.vcs import is_url
+from six.moves import shlex_quote
+
+from ._compat import PIP_VERSION
+from .click import style
+
+UNSAFE_PACKAGES = {"setuptools", "distribute", "pip"}
+COMPILE_EXCLUDE_OPTIONS = {
+ "--dry-run",
+ "--quiet",
+ "--rebuild",
+ "--upgrade",
+ "--upgrade-package",
+ "--verbose",
+ "--cache-dir",
+}
+
+
+def key_from_ireq(ireq):
+ """Get a standardized key for an InstallRequirement."""
+ if ireq.req is None and ireq.link is not None:
+ return str(ireq.link)
+ else:
+ return key_from_req(ireq.req)
+
+
+def key_from_req(req):
+ """Get an all-lowercase version of the requirement's name."""
+ if hasattr(req, "key"):
+ # from pkg_resources, such as installed dists for pip-sync
+ key = req.key
+ else:
+ # from packaging, such as install requirements from requirements.txt
+ key = req.name
+
+ key = key.replace("_", "-").lower()
+ return key
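+
+# Normalization sketch: both spellings map to the same key (hypothetical name):
+#
+#     "My_Package".replace("_", "-").lower()  # -> "my-package"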
+
+
+def comment(text):
+ return style(text, fg="green")
+
+
+def make_install_requirement(name, version, extras, constraint=False):
+ # If no extras are specified, the extras string is blank
+ extras_string = ""
+ if extras:
+ # Sort extras for stability
+ extras_string = "[{}]".format(",".join(sorted(extras)))
+
+ return install_req_from_line(
+ str("{}{}=={}".format(name, extras_string, version)), constraint=constraint
+ )
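+
+# Formatting sketch for the pinned line built above (hypothetical values):
+#
+#     name, version, extras = "requests", "2.24.0", {"socks", "security"}
+#     extras_string = "[{}]".format(",".join(sorted(extras))) if extras else ""
+#     "{}{}=={}".format(name, extras_string, version)
+#     # -> "requests[security,socks]==2.24.0"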
+
+
+def is_url_requirement(ireq):
+ """
+    Return True if the requirement was specified as a path or URL.
+    ireq.original_link will have been set by InstallRequirement.__init__.
+ """
+ return bool(ireq.original_link)
+
+
+def format_requirement(ireq, marker=None, hashes=None):
+ """
+    Generic formatter for pretty printing InstallRequirements to the terminal
+    in a less verbose way than using their `__str__` method.
+ """
+ if ireq.editable:
+ line = "-e {}".format(ireq.link.url)
+ elif is_url_requirement(ireq):
+ line = ireq.link.url
+ else:
+ line = str(ireq.req).lower()
+
+ if marker:
+ line = "{} ; {}".format(line, marker)
+
+ if hashes:
+ for hash_ in sorted(hashes):
+ line += " \\\n --hash={}".format(hash_)
+
+ return line
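+
+# Output sketch for a hashed requirement line as built above (hypothetical digests):
+#
+#     line = "six==1.15.0"
+#     for hash_ in sorted(["sha256:aaa", "sha256:bbb"]):
+#         line += " \\\n    --hash={}".format(hash_)
+#     print(line)
+#     # six==1.15.0 \
+#     #     --hash=sha256:aaa \
+#     #     --hash=sha256:bbb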
+
+
+def format_specifier(ireq):
+ """
+ Generic formatter for pretty printing the specifier part of
+ InstallRequirements to the terminal.
+ """
+ # TODO: Ideally, this is carried over to the pip library itself
+ specs = ireq.specifier._specs if ireq.req is not None else []
+ specs = sorted(specs, key=lambda x: x._spec[1])
+ return ",".join(str(s) for s in specs) or "<any>"
+
+
+def is_pinned_requirement(ireq):
+ """
+ Returns whether an InstallRequirement is a "pinned" requirement.
+
+ An InstallRequirement is considered pinned if:
+
+    - It is not editable
+ - It has exactly one specifier
+ - That specifier is "=="
+ - The version does not contain a wildcard
+
+ Examples:
+ django==1.8 # pinned
+ django>1.8 # NOT pinned
+ django~=1.8 # NOT pinned
+ django==1.* # NOT pinned
+ """
+ if ireq.editable:
+ return False
+
+ if ireq.req is None or len(ireq.specifier._specs) != 1:
+ return False
+
+ op, version = next(iter(ireq.specifier._specs))._spec
+ return (op == "==" or op == "===") and not version.endswith(".*")
+
+
+def as_tuple(ireq):
+ """
+    Pulls out the (name: str, version: str, extras: (str, ...)) tuple from
+    the pinned InstallRequirement.
+ """
+ if not is_pinned_requirement(ireq):
+ raise TypeError("Expected a pinned InstallRequirement, got {}".format(ireq))
+
+ name = key_from_ireq(ireq)
+ version = next(iter(ireq.specifier._specs))._spec[1]
+ extras = tuple(sorted(ireq.extras))
+ return name, version, extras
+
+
+def flat_map(fn, collection):
+ """Map a function over a collection and flatten the result by one-level"""
+ return chain.from_iterable(map(fn, collection))
+
+
+def lookup_table(values, key=None, keyval=None, unique=False, use_lists=False):
+ """
+ Builds a dict-based lookup table (index) elegantly.
+
+ Supports building normal and unique lookup tables. For example:
+
+ >>> assert lookup_table(
+ ... ['foo', 'bar', 'baz', 'qux', 'quux'], lambda s: s[0]) == {
+ ... 'b': {'bar', 'baz'},
+ ... 'f': {'foo'},
+ ... 'q': {'quux', 'qux'}
+ ... }
+
+ For key functions that uniquely identify values, set unique=True:
+
+ >>> assert lookup_table(
+ ... ['foo', 'bar', 'baz', 'qux', 'quux'], lambda s: s[0],
+ ... unique=True) == {
+ ... 'b': 'baz',
+ ... 'f': 'foo',
+ ... 'q': 'quux'
+ ... }
+
+ For the values represented as lists, set use_lists=True:
+
+ >>> assert lookup_table(
+ ... ['foo', 'bar', 'baz', 'qux', 'quux'], lambda s: s[0],
+ ... use_lists=True) == {
+ ... 'b': ['bar', 'baz'],
+ ... 'f': ['foo'],
+ ... 'q': ['qux', 'quux']
+ ... }
+
+ The values of the resulting lookup table will be lists, not sets.
+
+ For extra power, you can even change the values while building up the LUT.
+ To do so, use the `keyval` function instead of the `key` arg:
+
+ >>> assert lookup_table(
+ ... ['foo', 'bar', 'baz', 'qux', 'quux'],
+ ... keyval=lambda s: (s[0], s[1:])) == {
+ ... 'b': {'ar', 'az'},
+ ... 'f': {'oo'},
+ ... 'q': {'uux', 'ux'}
+ ... }
+
+ """
+ if keyval is None:
+ if key is None:
+
+ def keyval(v):
+ return v
+
+ else:
+
+ def keyval(v):
+ return (key(v), v)
+
+ if unique:
+ return dict(keyval(v) for v in values)
+
+ lut = {}
+ for value in values:
+ k, v = keyval(value)
+ try:
+ s = lut[k]
+ except KeyError:
+ if use_lists:
+ s = lut[k] = list()
+ else:
+ s = lut[k] = set()
+ if use_lists:
+ s.append(v)
+ else:
+ s.add(v)
+ return dict(lut)
+
+
+def dedup(iterable):
+ """Deduplicate an iterable object like iter(set(iterable)) but
+ order-preserved.
+ """
+ return iter(OrderedDict.fromkeys(iterable))
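+# Example: list(dedup([3, 1, 3, 2, 1])) == [3, 1, 2] -- the first occurrence
+# of each item is kept.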
+
+
+def name_from_req(req):
+ """Get the name of the requirement"""
+ if hasattr(req, "project_name"):
+ # from pkg_resources, such as installed dists for pip-sync
+ return req.project_name
+ else:
+ # from packaging, such as install requirements from requirements.txt
+ return req.name
+
+
+def fs_str(string):
+ """
+ Convert given string to a correctly encoded filesystem string.
+
+ On Python 2, if the input string is unicode, converts it to bytes
+ encoded with the filesystem encoding.
+
+ On Python 3 returns the string as is, since Python 3 uses unicode
+ paths and the input string shouldn't be bytes.
+
+ :type string: str|unicode
+ :rtype: str
+ """
+ if isinstance(string, str):
+ return string
+ if isinstance(string, bytes):
+ raise AssertionError
+ return string.encode(_fs_encoding)
+
+
+_fs_encoding = sys.getfilesystemencoding() or sys.getdefaultencoding()
+
+
+def get_hashes_from_ireq(ireq):
+ """
+ Given an InstallRequirement, return a list of string hashes in
+ the format "{algorithm}:{hash}". Return an empty list if there are no hashes
+ in the requirement options.
+ """
+ result = []
+ if PIP_VERSION[:2] <= (20, 0):
+ ireq_hashes = ireq.options.get("hashes", {})
+ else:
+ ireq_hashes = ireq.hash_options
+ for algorithm, hexdigests in ireq_hashes.items():
+ for hash_ in hexdigests:
+ result.append("{}:{}".format(algorithm, hash_))
+ return result
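+# Illustrative example: for a requirement whose hash options are
+# {"sha256": ["abc123", "def456"]} this returns
+# ["sha256:abc123", "sha256:def456"]; callers such as format_requirement()
+# sort the result before writing it out.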
+
+
+def force_text(s):
+ """
+ Return a string representing `s`.
+ """
+ if s is None:
+ return ""
+ if not isinstance(s, six.string_types):
+ return six.text_type(s)
+ return s
+
+
+def get_compile_command(click_ctx):
+ """
+ Returns a normalized compile command depending on cli context.
+
+ The command will be normalized by:
+    - expanding short options to their long forms
+ - removing values that are already default
+ - sorting the arguments
+ - removing one-off arguments like '--upgrade'
+ - removing arguments that don't change build behaviour like '--verbose'
+ """
+ from piptools.scripts.compile import cli
+
+ # Map of the compile cli options (option name -> click.Option)
+ compile_options = {option.name: option for option in cli.params}
+
+ left_args = []
+ right_args = []
+
+ for option_name, value in click_ctx.params.items():
+ option = compile_options[option_name]
+
+ # Get the latest option name (usually it'll be a long name)
+ option_long_name = option.opts[-1]
+
+        # Collect variadic args separately; they will be added
+        # at the end of the command later
+ if option.nargs < 0:
+ # These will necessarily be src_files
+ # Re-add click-stripped '--' if any start with '-'
+ if any(val.startswith("-") and val != "-" for val in value):
+ right_args.append("--")
+ right_args.extend([shlex_quote(force_text(val)) for val in value])
+ continue
+
+ # Exclude one-off options (--upgrade/--upgrade-package/--rebuild/...)
+ # or options that don't change compile behaviour (--verbose/--dry-run/...)
+ if option_long_name in COMPILE_EXCLUDE_OPTIONS:
+ continue
+
+ # Skip options without a value
+ if option.default is None and not value:
+ continue
+
+ # Skip options with a default value
+ if option.default == value:
+ continue
+
+ # Use a file name for file-like objects
+ if isinstance(value, LazyFile):
+ value = value.name
+
+        # Convert the value to a list
+ if not isinstance(value, (tuple, list)):
+ value = [value]
+
+ for val in value:
+            # Flags don't have a value, so add the true or false long option name to args
+ if option.is_flag:
+                # If there are false-options, choose the option name based on the value
+ if option.secondary_opts:
+ # Get the latest false-option
+ secondary_option_long_name = option.secondary_opts[-1]
+ arg = option_long_name if val else secondary_option_long_name
+                # No false-options exist, so use the true-option name
+ else:
+ arg = option_long_name
+ left_args.append(shlex_quote(arg))
+            # Otherwise, append the option together with its value
+ else:
+ if isinstance(val, six.string_types) and is_url(val):
+ val = redact_auth_from_url(val)
+ if option.name == "pip_args":
+ # shlex_quote would produce functional but noisily quoted results,
+ # e.g. --pip-args='--cache-dir='"'"'/tmp/with spaces'"'"''
+ # Instead, we try to get more legible quoting via repr:
+ left_args.append(
+ "{option}={value}".format(
+ option=option_long_name, value=repr(fs_str(force_text(val)))
+ )
+ )
+ else:
+ left_args.append(
+ "{option}={value}".format(
+ option=option_long_name, value=shlex_quote(force_text(val))
+ )
+ )
+
+ return " ".join(["pip-compile"] + sorted(left_args) + sorted(right_args))
diff --git a/third_party/python/pip-tools/piptools/writer.py b/third_party/python/pip-tools/piptools/writer.py
new file mode 100644
index 0000000000..3bb2325fc1
--- /dev/null
+++ b/third_party/python/pip-tools/piptools/writer.py
@@ -0,0 +1,239 @@
+from __future__ import unicode_literals
+
+import os
+import re
+from itertools import chain
+
+import six
+
+from .click import unstyle
+from .logging import log
+from .utils import (
+ UNSAFE_PACKAGES,
+ comment,
+ dedup,
+ format_requirement,
+ get_compile_command,
+ key_from_ireq,
+)
+
+MESSAGE_UNHASHED_PACKAGE = comment(
+ "# WARNING: pip install will require the following package to be hashed."
+ "\n# Consider using a hashable URL like "
+ "https://github.com/jazzband/pip-tools/archive/SOMECOMMIT.zip"
+)
+
+MESSAGE_UNSAFE_PACKAGES_UNPINNED = comment(
+ "# WARNING: The following packages were not pinned, but pip requires them to be"
+ "\n# pinned when the requirements file includes hashes. "
+ "Consider using the --allow-unsafe flag."
+)
+
+MESSAGE_UNSAFE_PACKAGES = comment(
+ "# The following packages are considered to be unsafe in a requirements file:"
+)
+
+MESSAGE_UNINSTALLABLE = (
+ "The generated requirements file may be rejected by pip install. "
+ "See # WARNING lines for details."
+)
+
+
+strip_comes_from_line_re = re.compile(r" \(line \d+\)$")
+
+
+def _comes_from_as_string(ireq):
+ if isinstance(ireq.comes_from, six.string_types):
+ return strip_comes_from_line_re.sub("", ireq.comes_from)
+ return key_from_ireq(ireq.comes_from)
+
+
+class OutputWriter(object):
+ def __init__(
+ self,
+ src_files,
+ dst_file,
+ click_ctx,
+ dry_run,
+ emit_header,
+ emit_index_url,
+ emit_trusted_host,
+ annotate,
+ generate_hashes,
+ default_index_url,
+ index_urls,
+ trusted_hosts,
+ format_control,
+ allow_unsafe,
+ find_links,
+ emit_find_links,
+ ):
+ self.src_files = src_files
+ self.dst_file = dst_file
+ self.click_ctx = click_ctx
+ self.dry_run = dry_run
+ self.emit_header = emit_header
+ self.emit_index_url = emit_index_url
+ self.emit_trusted_host = emit_trusted_host
+ self.annotate = annotate
+ self.generate_hashes = generate_hashes
+ self.default_index_url = default_index_url
+ self.index_urls = index_urls
+ self.trusted_hosts = trusted_hosts
+ self.format_control = format_control
+ self.allow_unsafe = allow_unsafe
+ self.find_links = find_links
+ self.emit_find_links = emit_find_links
+
+ def _sort_key(self, ireq):
+ return (not ireq.editable, str(ireq.req).lower())
+
+ def write_header(self):
+ if self.emit_header:
+ yield comment("#")
+ yield comment("# This file is autogenerated by pip-compile")
+ yield comment("# To update, run:")
+ yield comment("#")
+ compile_command = os.environ.get(
+ "CUSTOM_COMPILE_COMMAND"
+ ) or get_compile_command(self.click_ctx)
+ yield comment("# {}".format(compile_command))
+ yield comment("#")
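+            # Illustrative header block as emitted above, assuming the compile
+            # command normalizes to "pip-compile requirements.in":
+            #
+            #   #
+            #   # This file is autogenerated by pip-compile
+            #   # To update, run:
+            #   #
+            #   # pip-compile requirements.in
+            #   #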
+
+ def write_index_options(self):
+ if self.emit_index_url:
+ for index, index_url in enumerate(dedup(self.index_urls)):
+ if index_url.rstrip("/") == self.default_index_url:
+ continue
+ flag = "--index-url" if index == 0 else "--extra-index-url"
+ yield "{} {}".format(flag, index_url)
+
+ def write_trusted_hosts(self):
+ if self.emit_trusted_host:
+ for trusted_host in dedup(self.trusted_hosts):
+ yield "--trusted-host {}".format(trusted_host)
+
+ def write_format_controls(self):
+ for nb in dedup(sorted(self.format_control.no_binary)):
+ yield "--no-binary {}".format(nb)
+ for ob in dedup(sorted(self.format_control.only_binary)):
+ yield "--only-binary {}".format(ob)
+
+ def write_find_links(self):
+ if self.emit_find_links:
+ for find_link in dedup(self.find_links):
+ yield "--find-links {}".format(find_link)
+
+ def write_flags(self):
+ emitted = False
+ for line in chain(
+ self.write_index_options(),
+ self.write_find_links(),
+ self.write_trusted_hosts(),
+ self.write_format_controls(),
+ ):
+ emitted = True
+ yield line
+ if emitted:
+ yield ""
+
+ def _iter_lines(self, results, unsafe_requirements=None, markers=None, hashes=None):
+ # default values
+ unsafe_requirements = unsafe_requirements or []
+ markers = markers or {}
+ hashes = hashes or {}
+
+ # Check for unhashed or unpinned packages if at least one package does have
+        # hashes, since that will trigger pip install's --require-hashes mode.
+ warn_uninstallable = False
+ has_hashes = hashes and any(hash for hash in hashes.values())
+
+ yielded = False
+
+ for line in self.write_header():
+ yield line
+ yielded = True
+ for line in self.write_flags():
+ yield line
+ yielded = True
+
+ unsafe_requirements = (
+ {r for r in results if r.name in UNSAFE_PACKAGES}
+ if not unsafe_requirements
+ else unsafe_requirements
+ )
+ packages = {r for r in results if r.name not in UNSAFE_PACKAGES}
+
+ if packages:
+ packages = sorted(packages, key=self._sort_key)
+ for ireq in packages:
+ if has_hashes and not hashes.get(ireq):
+ yield MESSAGE_UNHASHED_PACKAGE
+ warn_uninstallable = True
+ line = self._format_requirement(
+ ireq, markers.get(key_from_ireq(ireq)), hashes=hashes
+ )
+ yield line
+ yielded = True
+
+ if unsafe_requirements:
+ unsafe_requirements = sorted(unsafe_requirements, key=self._sort_key)
+ yield ""
+ yielded = True
+ if has_hashes and not self.allow_unsafe:
+ yield MESSAGE_UNSAFE_PACKAGES_UNPINNED
+ warn_uninstallable = True
+ else:
+ yield MESSAGE_UNSAFE_PACKAGES
+
+ for ireq in unsafe_requirements:
+ ireq_key = key_from_ireq(ireq)
+ if not self.allow_unsafe:
+ yield comment("# {}".format(ireq_key))
+ else:
+ line = self._format_requirement(
+ ireq, marker=markers.get(ireq_key), hashes=hashes
+ )
+ yield line
+
+ # Yield even when there's no real content, so that blank files are written
+ if not yielded:
+ yield ""
+
+ if warn_uninstallable:
+ log.warning(MESSAGE_UNINSTALLABLE)
+
+ def write(self, results, unsafe_requirements, markers, hashes):
+
+ for line in self._iter_lines(results, unsafe_requirements, markers, hashes):
+ log.info(line)
+ if not self.dry_run:
+ self.dst_file.write(unstyle(line).encode("utf-8"))
+ self.dst_file.write(os.linesep.encode("utf-8"))
+
+ def _format_requirement(self, ireq, marker=None, hashes=None):
+ ireq_hashes = (hashes if hashes is not None else {}).get(ireq)
+
+ line = format_requirement(ireq, marker=marker, hashes=ireq_hashes)
+
+ if not self.annotate:
+ return line
+
+        # Annotate which packages or requirements (.in) files this package is required by
+ required_by = set()
+ if hasattr(ireq, "_source_ireqs"):
+ required_by |= {
+ _comes_from_as_string(src_ireq)
+ for src_ireq in ireq._source_ireqs
+ if src_ireq.comes_from
+ }
+ elif ireq.comes_from:
+ required_by.add(_comes_from_as_string(ireq))
+ if required_by:
+ annotation = ", ".join(sorted(required_by))
+ line = "{:24}{}{}".format(
+ line,
+ " \\\n " if ireq_hashes else " ",
+ comment("# via " + annotation),
+ )
+ return line
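+# Illustrative example of an annotated line (hypothetical package set): for
+# six==1.15.0 required both by a requirements.in entry and by the requests
+# package, the emitted line is expected to look roughly like
+#     six==1.15.0               # via -r requirements.in, requests
+# with the requirement padded to 24 characters before the "# via" comment.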
diff --git a/third_party/python/pip-tools/setup.cfg b/third_party/python/pip-tools/setup.cfg
new file mode 100644
index 0000000000..3c597b0632
--- /dev/null
+++ b/third_party/python/pip-tools/setup.cfg
@@ -0,0 +1,85 @@
+[metadata]
+name = pip-tools
+url = https://github.com/jazzband/pip-tools/
+license = BSD
+author = Vincent Driessen
+author_email = me@nvie.com
+description = pip-tools keeps your pinned dependencies fresh.
+long_description = file: README.rst
+classifiers =
+ Development Status :: 5 - Production/Stable
+ Intended Audience :: Developers
+ Intended Audience :: System Administrators
+ License :: OSI Approved :: BSD License
+ Operating System :: OS Independent
+ Programming Language :: Python
+ Programming Language :: Python :: 2
+ Programming Language :: Python :: 2.7
+ Programming Language :: Python :: 3
+ Programming Language :: Python :: 3.5
+ Programming Language :: Python :: 3.6
+ Programming Language :: Python :: 3.7
+ Programming Language :: Python :: 3.8
+ Programming Language :: Python :: Implementation :: CPython
+ Programming Language :: Python :: Implementation :: PyPy
+ Topic :: System :: Systems Administration
+
+[options]
+python_requires = >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*
+setup_requires = setuptools_scm
+packages = find:
+zip_safe = false
+install_requires =
+ click >= 7
+ six
+ pip >= 20.0
+
+[options.packages.find]
+exclude = tests
+
+[options.extras_require]
+testing =
+ mock
+ pytest
+ pytest-rerunfailures
+coverage = pytest-cov
+
+[options.entry_points]
+console_scripts =
+ pip-compile = piptools.scripts.compile:cli
+ pip-sync = piptools.scripts.sync:cli
+
+[bdist_wheel]
+universal = 1
+
+[tool:pytest]
+norecursedirs = .* build dist venv test_data piptools/_compat/*
+testpaths = tests piptools
+filterwarnings =
+ ignore::PendingDeprecationWarning:pip\._vendor.+
+ ignore::DeprecationWarning:pip\._vendor.+
+markers =
+ network: mark tests that require internet access
+
+[flake8]
+max-line-length = 88
+exclude = build/*, dist/*, pip_tools.egg-info/*, piptools/_compat/*, .tox/*, .venv/*, .git/*, .eggs/*
+extend-ignore = E203 # E203 conflicts with PEP8; see https://github.com/psf/black#slices
+pytest-fixture-no-parentheses = true
+pytest-parametrize-names-type = tuple
+pytest-parametrize-values-type = tuple
+pytest-parametrize-values-row-type = tuple
+
+[isort]
+combine_as_imports = True
+forced_separate = piptools
+include_trailing_comma = True
+line_length = 88
+multi_line_output = 3
+default_section = THIRDPARTY
+known_first_party = piptools, tests, examples
+
+[egg_info]
+tag_build =
+tag_date = 0
+
diff --git a/third_party/python/pip-tools/setup.py b/third_party/python/pip-tools/setup.py
new file mode 100644
index 0000000000..d5d43d7c93
--- /dev/null
+++ b/third_party/python/pip-tools/setup.py
@@ -0,0 +1,3 @@
+from setuptools import setup
+
+setup(use_scm_version=True)
diff --git a/third_party/python/pip-tools/tox.ini b/third_party/python/pip-tools/tox.ini
new file mode 100644
index 0000000000..f0439eaf70
--- /dev/null
+++ b/third_party/python/pip-tools/tox.ini
@@ -0,0 +1,52 @@
+[tox]
+envlist =
+ # NOTE: keep this in sync with the env list in .travis.yml for tox-travis.
+ py{27,35,36,37,38,39,py,py3}-pip{20.0,20.1,20.2,latest,master}-coverage
+ checkqa
+ readme
+skip_missing_interpreters = True
+
+[testenv]
+extras =
+ testing
+ coverage: coverage
+deps =
+ pipmaster: -e git+https://github.com/pypa/pip.git@master#egg=pip
+; TODO: remove all 20.0 mentions once pip 20.2 has been released
+ pip20.0: pip==20.0.*
+ pip20.1: pip==20.1.*
+ pip20.2: pip==20.2.*
+setenv =
+ piplatest: PIP=latest
+ pipmaster: PIP=master
+ pip20.0: PIP==20.0
+ pip20.1: PIP==20.1
+ pip20.2: PIP==20.2
+
+ coverage: PYTEST_ADDOPTS=--strict --doctest-modules --cov --cov-report=term-missing --cov-report=xml {env:PYTEST_ADDOPTS:}
+commands_pre =
+ piplatest: python -m pip install -U pip
+ pip --version
+commands = pytest {posargs}
+passenv = CI GITHUB_ACTIONS
+pip_pre=True
+
+[testenv:checkqa]
+basepython = python3
+skip_install = True
+deps = pre-commit
+commands_pre =
+commands = pre-commit run --all-files --show-diff-on-failure
+
+[testenv:readme]
+deps = twine
+commands_pre =
+commands = twine check {distdir}/*
+
+[travis:env]
+PIP =
+ 20.0: pip20.0
+ 20.1: pip20.1
+ 20.2: pip20.2
+ latest: piplatest
+ master: pipmaster
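+# Illustrative usage (hypothetical local run): "tox -e py38-pip20.2-coverage"
+# runs the suite on Python 3.8 against pip 20.2.* with coverage enabled, and
+# "tox -e checkqa" runs the pre-commit checks.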