Diffstat (limited to 'third_party/python/pip_tools/piptools')
-rw-r--r-- third_party/python/pip_tools/piptools/__init__.py | 11
-rw-r--r-- third_party/python/pip_tools/piptools/__main__.py | 17
-rw-r--r-- third_party/python/pip_tools/piptools/_compat/__init__.py | 26
-rw-r--r-- third_party/python/pip_tools/piptools/_compat/contextlib.py | 18
-rw-r--r-- third_party/python/pip_tools/piptools/_compat/pip_compat.py | 18
-rw-r--r-- third_party/python/pip_tools/piptools/_compat/tempfile.py | 88
-rw-r--r-- third_party/python/pip_tools/piptools/cache.py | 173
-rw-r--r-- third_party/python/pip_tools/piptools/click.py | 6
-rw-r--r-- third_party/python/pip_tools/piptools/exceptions.py | 66
-rw-r--r-- third_party/python/pip_tools/piptools/locations.py | 25
-rw-r--r-- third_party/python/pip_tools/piptools/logging.py | 62
-rw-r--r-- third_party/python/pip_tools/piptools/repositories/__init__.py | 3
-rw-r--r-- third_party/python/pip_tools/piptools/repositories/base.py | 57
-rw-r--r-- third_party/python/pip_tools/piptools/repositories/local.py | 97
-rw-r--r-- third_party/python/pip_tools/piptools/repositories/pypi.py | 531
-rw-r--r-- third_party/python/pip_tools/piptools/resolver.py | 405
-rw-r--r-- third_party/python/pip_tools/piptools/scripts/__init__.py | 0
-rw-r--r-- third_party/python/pip_tools/piptools/scripts/compile.py | 495
-rw-r--r-- third_party/python/pip_tools/piptools/scripts/sync.py | 214
-rw-r--r-- third_party/python/pip_tools/piptools/sync.py | 216
-rw-r--r-- third_party/python/pip_tools/piptools/utils.py | 384
-rw-r--r-- third_party/python/pip_tools/piptools/writer.py | 243
22 files changed, 3155 insertions, 0 deletions
diff --git a/third_party/python/pip_tools/piptools/__init__.py b/third_party/python/pip_tools/piptools/__init__.py
new file mode 100644
index 0000000000..9f0c95aa56
--- /dev/null
+++ b/third_party/python/pip_tools/piptools/__init__.py
@@ -0,0 +1,11 @@
+import locale
+
+from piptools.click import secho
+
+# Needed for locale.getpreferredencoding(False) to work
+# in pip._internal.utils.encoding.auto_decode
+try:
+ locale.setlocale(locale.LC_ALL, "")
+except locale.Error as e: # pragma: no cover
+ # setlocale can apparently crash if the locale is uninitialized
+ secho("Ignoring error when setting locale: {}".format(e), fg="red")
diff --git a/third_party/python/pip_tools/piptools/__main__.py b/third_party/python/pip_tools/piptools/__main__.py
new file mode 100644
index 0000000000..2d8b75e85d
--- /dev/null
+++ b/third_party/python/pip_tools/piptools/__main__.py
@@ -0,0 +1,17 @@
+import click
+
+from piptools.scripts import compile, sync
+
+
+@click.group()
+def cli():
+ pass
+
+
+cli.add_command(compile.cli, "compile")
+cli.add_command(sync.cli, "sync")
+
+
+# Enable ``python -m piptools ...``.
+if __name__ == "__main__": # pragma: no branch
+ cli()
diff --git a/third_party/python/pip_tools/piptools/_compat/__init__.py b/third_party/python/pip_tools/piptools/_compat/__init__.py
new file mode 100644
index 0000000000..de28628db2
--- /dev/null
+++ b/third_party/python/pip_tools/piptools/_compat/__init__.py
@@ -0,0 +1,26 @@
+# coding: utf-8
+# flake8: noqa
+from __future__ import absolute_import, division, print_function, unicode_literals
+
+import errno
+import os
+
+from pip._vendor import six
+
+from .pip_compat import PIP_VERSION, parse_requirements
+
+if six.PY2:
+ from .tempfile import TemporaryDirectory
+else:
+ from tempfile import TemporaryDirectory
+
+
+def makedirs(name, mode=0o777, exist_ok=False):
+ if six.PY2:
+ try:
+ os.makedirs(name, mode)
+ except OSError as e:
+ if not exist_ok or e.errno != errno.EEXIST:
+ raise
+ else:
+ os.makedirs(name, mode, exist_ok)
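A short usage sketch of the shim above (the paths are illustrative, and it assumes the vendored piptools._compat is importable): on Python 3 it defers to os.makedirs(exist_ok=...), while on Python 2 it swallows only EEXIST, so other failures such as permission errors still propagate.

    import os
    import tempfile

    from piptools._compat import makedirs

    base = tempfile.mkdtemp()
    target = os.path.join(base, "a", "b")

    makedirs(target, exist_ok=True)  # creates intermediate directories
    makedirs(target, exist_ok=True)  # second call is a no-op, not an OSError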
diff --git a/third_party/python/pip_tools/piptools/_compat/contextlib.py b/third_party/python/pip_tools/piptools/_compat/contextlib.py
new file mode 100644
index 0000000000..04039ccb01
--- /dev/null
+++ b/third_party/python/pip_tools/piptools/_compat/contextlib.py
@@ -0,0 +1,18 @@
+# Ported from python 3.7 contextlib.py
+class nullcontext(object):
+ """Context manager that does no additional processing.
+ Used as a stand-in for a normal context manager, when a particular
+ block of code is only sometimes used with a normal context manager:
+ cm = optional_cm if condition else nullcontext()
+ with cm:
+ # Perform operation, using optional_cm if condition is True
+ """
+
+ def __init__(self, enter_result=None):
+ self.enter_result = enter_result
+
+ def __enter__(self):
+ return self.enter_result
+
+ def __exit__(self, *excinfo):
+ pass
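The docstring's pattern in runnable form, a minimal sketch (Python 3.7+ ships the same class as contextlib.nullcontext, which the import below uses for self-containment):

    import io
    from contextlib import nullcontext  # stdlib equivalent of the port above

    def read_data(source=None):
        # Use the caller's file-like object if given; otherwise fall back
        # to a stand-in that just yields an in-memory default.
        cm = source if source is not None else nullcontext(io.StringIO("default"))
        with cm as f:
            return f.read()

    print(read_data())                            # -> "default"
    print(read_data(io.StringIO("from caller")))  # -> "from caller"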
diff --git a/third_party/python/pip_tools/piptools/_compat/pip_compat.py b/third_party/python/pip_tools/piptools/_compat/pip_compat.py
new file mode 100644
index 0000000000..6cd24a0ff9
--- /dev/null
+++ b/third_party/python/pip_tools/piptools/_compat/pip_compat.py
@@ -0,0 +1,18 @@
+# -*- coding=utf-8 -*-
+from __future__ import absolute_import
+
+import pip
+from pip._internal.req import parse_requirements as _parse_requirements
+from pip._internal.req.constructors import install_req_from_parsed_requirement
+from pip._vendor.packaging.version import parse as parse_version
+
+PIP_VERSION = tuple(map(int, parse_version(pip.__version__).base_version.split(".")))
+
+
+def parse_requirements(
+ filename, session, finder=None, options=None, constraint=False, isolated=False
+):
+ for parsed_req in _parse_requirements(
+ filename, session, finder=finder, options=options, constraint=constraint
+ ):
+ yield install_req_from_parsed_requirement(parsed_req, isolated=isolated)
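How the PIP_VERSION tuple above comes out in practice, sketched with pip's vendored packaging (the version strings are arbitrary examples): base_version strips pre- and dev-release suffixes, so comparisons like PIP_VERSION[:2] <= (20, 2) stay purely numeric.

    from pip._vendor.packaging.version import parse as parse_version

    for text in ("20.2.3", "20.3b1", "21.0.dev0"):
        base = parse_version(text).base_version
        print(text, "->", tuple(map(int, base.split("."))))
    # 20.2.3    -> (20, 2, 3)
    # 20.3b1    -> (20, 3)
    # 21.0.dev0 -> (21, 0)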
diff --git a/third_party/python/pip_tools/piptools/_compat/tempfile.py b/third_party/python/pip_tools/piptools/_compat/tempfile.py
new file mode 100644
index 0000000000..dc7e9ef997
--- /dev/null
+++ b/third_party/python/pip_tools/piptools/_compat/tempfile.py
@@ -0,0 +1,88 @@
+# coding: utf-8
+from __future__ import absolute_import, division, print_function
+
+import os as _os
+import sys as _sys
+import warnings as _warnings
+from tempfile import mkdtemp
+
+
+class TemporaryDirectory(object):
+ """Create and return a temporary directory. This has the same
+ behavior as mkdtemp but can be used as a context manager. For
+ example:
+
+ with TemporaryDirectory() as tmpdir:
+ ...
+
+ Upon exiting the context, the directory and everything contained
+ in it are removed.
+ """
+
+ def __init__(self, suffix="", prefix="tmp", dir=None):
+ self._closed = False
+ self.name = None # Handle mkdtemp raising an exception
+ self.name = mkdtemp(suffix, prefix, dir)
+
+ def __repr__(self):
+ return "<{} {!r}>".format(self.__class__.__name__, self.name)
+
+ def __enter__(self):
+ return self.name
+
+ def cleanup(self):
+ if self.name and not self._closed:
+ try:
+ self._rmtree(self.name)
+ except (TypeError, AttributeError) as ex:
+ # Issue #10188: Emit a warning on stderr
+ # if the directory could not be cleaned
+ # up due to missing globals
+ if "None" not in str(ex):
+ raise
+ print(
+ "ERROR: {!r} while cleaning up {!r}".format(ex, self),
+ file=_sys.stderr,
+ )
+ return
+ self._closed = True
+
+ def __exit__(self, exc, value, tb):
+ self.cleanup()
+
+ def __del__(self):
+ # Issue a ResourceWarning if implicit cleanup needed
+ self.cleanup()
+
+ # XXX (ncoghlan): The following code attempts to make
+ # this class tolerant of the module nulling out process
+ # that happens during CPython interpreter shutdown
+ # Alas, it doesn't actually manage it. See issue #10188
+ _listdir = staticmethod(_os.listdir)
+ _path_join = staticmethod(_os.path.join)
+ _isdir = staticmethod(_os.path.isdir)
+ _islink = staticmethod(_os.path.islink)
+ _remove = staticmethod(_os.remove)
+ _rmdir = staticmethod(_os.rmdir)
+ _warn = _warnings.warn
+
+ def _rmtree(self, path):
+ # Essentially a stripped down version of shutil.rmtree. We can't
+ # use globals because they may be None'ed out at shutdown.
+ for name in self._listdir(path):
+ fullname = self._path_join(path, name)
+ try:
+ isdir = self._isdir(fullname) and not self._islink(fullname)
+ except OSError:
+ isdir = False
+ if isdir:
+ self._rmtree(fullname)
+ else:
+ try:
+ self._remove(fullname)
+ except OSError:
+ pass
+ try:
+ self._rmdir(path)
+ except OSError:
+ pass
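A usage sketch of the backport above, mirroring its docstring; on Python 3 the stdlib class imported below behaves the same way, which is why the package only uses the backport on Python 2.

    import os
    from tempfile import TemporaryDirectory  # stdlib twin of the class above

    with TemporaryDirectory(prefix="piptools-") as tmpdir:
        path = os.path.join(tmpdir, "scratch.txt")
        with open(path, "w") as f:
            f.write("throwaway data")
        print(os.path.exists(path))  # True while inside the context

    print(os.path.exists(tmpdir))    # False: the whole tree was removed on exit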
diff --git a/third_party/python/pip_tools/piptools/cache.py b/third_party/python/pip_tools/piptools/cache.py
new file mode 100644
index 0000000000..301d38bd52
--- /dev/null
+++ b/third_party/python/pip_tools/piptools/cache.py
@@ -0,0 +1,173 @@
+# coding: utf-8
+from __future__ import absolute_import, division, print_function, unicode_literals
+
+import errno
+import json
+import os
+import platform
+import sys
+
+from pip._vendor.packaging.requirements import Requirement
+
+from ._compat import makedirs
+from .exceptions import PipToolsError
+from .utils import as_tuple, key_from_req, lookup_table
+
+_PEP425_PY_TAGS = {"cpython": "cp", "pypy": "pp", "ironpython": "ip", "jython": "jy"}
+
+
+def _implementation_name():
+ """similar to PEP 425, however the minor version is separated from the
+ major to differentation "3.10" and "31.0".
+ """
+ implementation_name = platform.python_implementation().lower()
+ implementation = _PEP425_PY_TAGS.get(implementation_name, "??")
+ return "{}{}.{}".format(implementation, *sys.version_info)
+
+
+class CorruptCacheError(PipToolsError):
+ def __init__(self, path):
+ self.path = path
+
+ def __str__(self):
+ lines = [
+ "The dependency cache seems to have been corrupted.",
+ "Inspect, or delete, the following file:",
+ " {}".format(self.path),
+ ]
+ return os.linesep.join(lines)
+
+
+def read_cache_file(cache_file_path):
+ with open(cache_file_path, "r") as cache_file:
+ try:
+ doc = json.load(cache_file)
+ except ValueError:
+ raise CorruptCacheError(cache_file_path)
+
+ # Check version and load the contents
+ if doc["__format__"] != 1:
+ raise ValueError("Unknown cache file format")
+ return doc["dependencies"]
+
+
+class DependencyCache(object):
+ """
+ Creates a new persistent dependency cache for the current Python version.
+ The cache file is written to the appropriate user cache dir for the
+ current platform, i.e.
+
+ ~/.cache/pip-tools/depcache-pyX.Y.json
+
+ where "py" indicates the Python implementation and "X.Y" the Python
+ version.
+ """
+
+ def __init__(self, cache_dir):
+ makedirs(cache_dir, exist_ok=True)
+ cache_filename = "depcache-{}.json".format(_implementation_name())
+
+ self._cache_file = os.path.join(cache_dir, cache_filename)
+ self._cache = None
+
+ @property
+ def cache(self):
+ """
+ The dictionary that is the actual in-memory cache. This property
+ lazily loads the cache from disk.
+ """
+ if self._cache is None:
+ self.read_cache()
+ return self._cache
+
+ def as_cache_key(self, ireq):
+ """
+ Given a requirement, return its cache key. This behavior is a little weird
+ in order to allow backwards compatibility with cache files. For a requirement
+ without extras, this will return, for example:
+
+ ("ipython", "2.1.0")
+
+ For a requirement with extras, the extras will be comma-separated and appended
+ to the version, inside brackets, like so:
+
+ ("ipython", "2.1.0[nbconvert,notebook]")
+ """
+ name, version, extras = as_tuple(ireq)
+ if not extras:
+ extras_string = ""
+ else:
+ extras_string = "[{}]".format(",".join(extras))
+ return name, "{}{}".format(version, extras_string)
+
+ def read_cache(self):
+ """Reads the cached contents into memory."""
+ try:
+ self._cache = read_cache_file(self._cache_file)
+ except IOError as e:
+ if e.errno != errno.ENOENT:
+ raise
+ self._cache = {}
+
+ def write_cache(self):
+ """Writes the cache to disk as JSON."""
+ doc = {"__format__": 1, "dependencies": self._cache}
+ with open(self._cache_file, "w") as f:
+ json.dump(doc, f, sort_keys=True)
+
+ def clear(self):
+ self._cache = {}
+ self.write_cache()
+
+ def __contains__(self, ireq):
+ pkgname, pkgversion_and_extras = self.as_cache_key(ireq)
+ return pkgversion_and_extras in self.cache.get(pkgname, {})
+
+ def __getitem__(self, ireq):
+ pkgname, pkgversion_and_extras = self.as_cache_key(ireq)
+ return self.cache[pkgname][pkgversion_and_extras]
+
+ def __setitem__(self, ireq, values):
+ pkgname, pkgversion_and_extras = self.as_cache_key(ireq)
+ self.cache.setdefault(pkgname, {})
+ self.cache[pkgname][pkgversion_and_extras] = values
+ self.write_cache()
+
+ def reverse_dependencies(self, ireqs):
+ """
+ Returns a lookup table of reverse dependencies for all the given ireqs.
+
+ Since this is all static, it only works if the dependency cache
+ contains the complete data, otherwise you end up with a partial view.
+ This is typically no problem if you use this function after the entire
+ dependency tree is resolved.
+ """
+ ireqs_as_cache_values = [self.as_cache_key(ireq) for ireq in ireqs]
+ return self._reverse_dependencies(ireqs_as_cache_values)
+
+ def _reverse_dependencies(self, cache_keys):
+ """
+ Returns a lookup table of reverse dependencies for all the given cache keys.
+
+ Example input:
+
+ [('pep8', '1.5.7'),
+ ('flake8', '2.4.0'),
+ ('mccabe', '0.3'),
+ ('pyflakes', '0.8.1')]
+
+ Example output:
+
+ {'pep8': ['flake8'],
+ 'flake8': [],
+ 'mccabe': ['flake8'],
+ 'pyflakes': ['flake8']}
+
+ """
+ # First, collect all the dependencies into a sequence of (parent, child)
+ # tuples, like [('flake8', 'pep8'), ('flake8', 'mccabe'), ...]
+ return lookup_table(
+ (key_from_req(Requirement(dep_name)), name)
+ for name, version_and_extras in cache_keys
+ for dep_name in self.cache[name][version_and_extras]
+ )
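What _reverse_dependencies computes, restated with plain stdlib so it runs outside piptools (the package data is taken from the docstring's own example; the real method goes through piptools.utils.lookup_table and pip's vendored Requirement parser):

    from collections import defaultdict

    # cache key -> list of dependency requirement strings, as stored on disk
    cache = {
        ("pep8", "1.5.7"): [],
        ("flake8", "2.4.0"): ["pep8>=1.5.7", "mccabe>=0.2.1", "pyflakes>=0.8.1"],
        ("mccabe", "0.3"): [],
        ("pyflakes", "0.8.1"): [],
    }

    reverse = defaultdict(set)
    for (name, _version), deps in cache.items():
        for dep in deps:
            # Crude stand-in for packaging.requirements.Requirement(dep).name
            dep_name = dep.partition(">=")[0]
            reverse[dep_name].add(name)

    print(dict(reverse))
    # {'pep8': {'flake8'}, 'mccabe': {'flake8'}, 'pyflakes': {'flake8'}}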
diff --git a/third_party/python/pip_tools/piptools/click.py b/third_party/python/pip_tools/piptools/click.py
new file mode 100644
index 0000000000..86f1612c6a
--- /dev/null
+++ b/third_party/python/pip_tools/piptools/click.py
@@ -0,0 +1,6 @@
+from __future__ import absolute_import
+
+import click
+from click import * # noqa
+
+click.disable_unicode_literals_warning = True
diff --git a/third_party/python/pip_tools/piptools/exceptions.py b/third_party/python/pip_tools/piptools/exceptions.py
new file mode 100644
index 0000000000..5278972741
--- /dev/null
+++ b/third_party/python/pip_tools/piptools/exceptions.py
@@ -0,0 +1,66 @@
+from pip._internal.utils.misc import redact_auth_from_url
+
+
+class PipToolsError(Exception):
+ pass
+
+
+class NoCandidateFound(PipToolsError):
+ def __init__(self, ireq, candidates_tried, finder):
+ self.ireq = ireq
+ self.candidates_tried = candidates_tried
+ self.finder = finder
+
+ def __str__(self):
+ versions = []
+ pre_versions = []
+
+ for candidate in sorted(self.candidates_tried):
+ version = str(candidate.version)
+ if candidate.version.is_prerelease:
+ pre_versions.append(version)
+ else:
+ versions.append(version)
+
+ lines = ["Could not find a version that matches {}".format(self.ireq)]
+
+ if versions:
+ lines.append("Tried: {}".format(", ".join(versions)))
+
+ if pre_versions:
+ if self.finder.allow_all_prereleases:
+ line = "Tried"
+ else:
+ line = "Skipped"
+
+ line += " pre-versions: {}".format(", ".join(pre_versions))
+ lines.append(line)
+
+ if versions or pre_versions:
+ lines.append(
+ "There are incompatible versions in the resolved dependencies:"
+ )
+ source_ireqs = getattr(self.ireq, "_source_ireqs", [])
+ lines.extend(" {}".format(ireq) for ireq in source_ireqs)
+ else:
+ redacted_urls = tuple(
+ redact_auth_from_url(url) for url in self.finder.index_urls
+ )
+ lines.append("No versions found")
+ lines.append(
+ "{} {} reachable?".format(
+ "Were" if len(redacted_urls) > 1 else "Was",
+ " or ".join(redacted_urls),
+ )
+ )
+ return "\n".join(lines)
+
+
+class IncompatibleRequirements(PipToolsError):
+ def __init__(self, ireq_a, ireq_b):
+ self.ireq_a = ireq_a
+ self.ireq_b = ireq_b
+
+ def __str__(self):
+ message = "Incompatible requirements found: {} and {}"
+ return message.format(self.ireq_a, self.ireq_b)
diff --git a/third_party/python/pip_tools/piptools/locations.py b/third_party/python/pip_tools/piptools/locations.py
new file mode 100644
index 0000000000..9ca0ffe436
--- /dev/null
+++ b/third_party/python/pip_tools/piptools/locations.py
@@ -0,0 +1,25 @@
+import os
+from shutil import rmtree
+
+from pip._internal.utils.appdirs import user_cache_dir
+
+from .click import secho
+
+# The user_cache_dir helper comes straight from pip itself
+CACHE_DIR = user_cache_dir("pip-tools")
+
+# NOTE
+# We used to store the cache dir under ~/.pip-tools, which is not the
+# preferred place to store caches for any platform. This has been addressed
+# in pip-tools==1.0.5, but to be good citizens, we point this out explicitly
+# to the user when this directory is still found.
+LEGACY_CACHE_DIR = os.path.expanduser("~/.pip-tools")
+
+if os.path.exists(LEGACY_CACHE_DIR):
+ secho(
+ "Removing old cache dir {} (new cache dir is {})".format(
+ LEGACY_CACHE_DIR, CACHE_DIR
+ ),
+ fg="yellow",
+ )
+ rmtree(LEGACY_CACHE_DIR)
diff --git a/third_party/python/pip_tools/piptools/logging.py b/third_party/python/pip_tools/piptools/logging.py
new file mode 100644
index 0000000000..dcf068f7a2
--- /dev/null
+++ b/third_party/python/pip_tools/piptools/logging.py
@@ -0,0 +1,62 @@
+# coding: utf-8
+from __future__ import absolute_import, division, print_function, unicode_literals
+
+import contextlib
+import logging
+import sys
+
+from . import click
+
+# Initialise the builtin logging module for other components that use it,
+# e.g. pip.
+logging.basicConfig()
+
+
+class LogContext(object):
+ stream = sys.stderr
+
+ def __init__(self, verbosity=0, indent_width=2):
+ self.verbosity = verbosity
+ self.current_indent = 0
+ self._indent_width = indent_width
+
+ def log(self, message, *args, **kwargs):
+ kwargs.setdefault("err", True)
+ prefix = " " * self.current_indent
+ click.secho(prefix + message, *args, **kwargs)
+
+ def debug(self, *args, **kwargs):
+ if self.verbosity >= 1:
+ self.log(*args, **kwargs)
+
+ def info(self, *args, **kwargs):
+ if self.verbosity >= 0:
+ self.log(*args, **kwargs)
+
+ def warning(self, *args, **kwargs):
+ kwargs.setdefault("fg", "yellow")
+ self.log(*args, **kwargs)
+
+ def error(self, *args, **kwargs):
+ kwargs.setdefault("fg", "red")
+ self.log(*args, **kwargs)
+
+ def _indent(self):
+ self.current_indent += self._indent_width
+
+ def _dedent(self):
+ self.current_indent -= self._indent_width
+
+ @contextlib.contextmanager
+ def indentation(self):
+ """
+ Increase indentation.
+ """
+ self._indent()
+ try:
+ yield
+ finally:
+ self._dedent()
+
+
+log = LogContext()
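A short sketch of how the context above is used elsewhere in pip-tools (assuming the vendored piptools.logging is importable): verbosity gates debug output and indentation() nests messages, which is what produces the indented per-round resolver logs.

    from piptools.logging import LogContext

    ctx = LogContext(verbosity=1)

    ctx.info("Resolving constraints:")
    with ctx.indentation():
        ctx.debug("round 1")              # shown because verbosity >= 1
        with ctx.indentation():
            ctx.warning("needs review")   # yellow, indented two levels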
diff --git a/third_party/python/pip_tools/piptools/repositories/__init__.py b/third_party/python/pip_tools/piptools/repositories/__init__.py
new file mode 100644
index 0000000000..ce5142e8c6
--- /dev/null
+++ b/third_party/python/pip_tools/piptools/repositories/__init__.py
@@ -0,0 +1,3 @@
+# flake8: noqa
+from .local import LocalRequirementsRepository
+from .pypi import PyPIRepository
diff --git a/third_party/python/pip_tools/piptools/repositories/base.py b/third_party/python/pip_tools/piptools/repositories/base.py
new file mode 100644
index 0000000000..54849cb7f8
--- /dev/null
+++ b/third_party/python/pip_tools/piptools/repositories/base.py
@@ -0,0 +1,57 @@
+# coding: utf-8
+from __future__ import absolute_import, division, print_function, unicode_literals
+
+from abc import ABCMeta, abstractmethod
+from contextlib import contextmanager
+
+from pip._vendor.six import add_metaclass
+
+
+@add_metaclass(ABCMeta)
+class BaseRepository(object):
+ def clear_caches(self):
+ """Should clear any caches used by the implementation."""
+
+ @abstractmethod
+ @contextmanager
+ def freshen_build_caches(self):
+ """Should start with fresh build/source caches."""
+
+ @abstractmethod
+ def find_best_match(self, ireq):
+ """
+ Return a Version object that indicates the best match for the given
+ InstallRequirement according to the repository.
+ """
+
+ @abstractmethod
+ def get_dependencies(self, ireq):
+ """
+ Given a pinned, URL, or editable InstallRequirement, returns a set of
+ dependencies (also InstallRequirements, but not necessarily pinned).
+ They indicate the secondary dependencies for the given requirement.
+ """
+
+ @abstractmethod
+ def get_hashes(self, ireq):
+ """
+ Given a pinned InstallRequirement, returns a set of hashes that represent
+ all of the files for a given requirement. It is not acceptable for an
+ editable or unpinned requirement to be passed to this function.
+ """
+
+ @abstractmethod
+ @contextmanager
+ def allow_all_wheels(self):
+ """
+ Monkey patches pip.Wheel to allow wheels from all platforms and Python versions.
+ """
+
+ @abstractmethod
+ def copy_ireq_dependencies(self, source, dest):
+ """
+ Notifies the repository that `dest` is a copy of `source`, and so it
+ has the same dependencies. Otherwise, once we prepare an ireq to assign
+ it its name, we would lose track of those dependencies on combining
+ that ireq with others.
+ """
diff --git a/third_party/python/pip_tools/piptools/repositories/local.py b/third_party/python/pip_tools/piptools/repositories/local.py
new file mode 100644
index 0000000000..f185f35c3c
--- /dev/null
+++ b/third_party/python/pip_tools/piptools/repositories/local.py
@@ -0,0 +1,97 @@
+# coding: utf-8
+from __future__ import absolute_import, division, print_function, unicode_literals
+
+from contextlib import contextmanager
+
+from pip._internal.utils.hashes import FAVORITE_HASH
+
+from piptools.utils import as_tuple, key_from_ireq, make_install_requirement
+
+from .base import BaseRepository
+
+
+def ireq_satisfied_by_existing_pin(ireq, existing_pin):
+ """
+ Return True if the given InstallRequirement is satisfied by the
+ previously encountered version pin.
+ """
+ version = next(iter(existing_pin.req.specifier)).version
+ return ireq.req.specifier.contains(
+ version, prereleases=existing_pin.req.specifier.prereleases
+ )
+
+
+class LocalRequirementsRepository(BaseRepository):
+ """
+ The LocalRequirementsRepository proxies the _real_ repository by first
+ checking if a requirement can be satisfied by existing pins (i.e. the
+ result of a previous compile step).
+
+ In effect, if a requirement can be satisfied with a version pinned in the
+ requirements file, we prefer that version over the best match found in
+ PyPI. This keeps updates to the requirements.txt down to a minimum.
+ """
+
+ def __init__(self, existing_pins, proxied_repository, reuse_hashes=True):
+ self._reuse_hashes = reuse_hashes
+ self.repository = proxied_repository
+ self.existing_pins = existing_pins
+
+ @property
+ def options(self):
+ return self.repository.options
+
+ @property
+ def finder(self):
+ return self.repository.finder
+
+ @property
+ def session(self):
+ return self.repository.session
+
+ @property
+ def DEFAULT_INDEX_URL(self):
+ return self.repository.DEFAULT_INDEX_URL
+
+ def clear_caches(self):
+ self.repository.clear_caches()
+
+ @contextmanager
+ def freshen_build_caches(self):
+ with self.repository.freshen_build_caches():
+ yield
+
+ def find_best_match(self, ireq, prereleases=None):
+ key = key_from_ireq(ireq)
+ existing_pin = self.existing_pins.get(key)
+ if existing_pin and ireq_satisfied_by_existing_pin(ireq, existing_pin):
+ project, version, _ = as_tuple(existing_pin)
+ return make_install_requirement(
+ project, version, ireq.extras, constraint=ireq.constraint
+ )
+ else:
+ return self.repository.find_best_match(ireq, prereleases)
+
+ def get_dependencies(self, ireq):
+ return self.repository.get_dependencies(ireq)
+
+ def get_hashes(self, ireq):
+ existing_pin = self._reuse_hashes and self.existing_pins.get(
+ key_from_ireq(ireq)
+ )
+ if existing_pin and ireq_satisfied_by_existing_pin(ireq, existing_pin):
+ hashes = existing_pin.hash_options
+ hexdigests = hashes.get(FAVORITE_HASH)
+ if hexdigests:
+ return {
+ ":".join([FAVORITE_HASH, hexdigest]) for hexdigest in hexdigests
+ }
+ return self.repository.get_hashes(ireq)
+
+ @contextmanager
+ def allow_all_wheels(self):
+ with self.repository.allow_all_wheels():
+ yield
+
+ def copy_ireq_dependencies(self, source, dest):
+ self.repository.copy_ireq_dependencies(source, dest)
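A sketch of the pin check above, building throwaway ireqs with pip's own constructor (the package and versions are illustrative, and it assumes the vendored piptools is importable): Flask>=0.10 is satisfied by an existing Flask==0.10.1 pin, so LocalRequirementsRepository would keep the pin instead of asking the index.

    from pip._internal.req.constructors import install_req_from_line

    from piptools.repositories.local import ireq_satisfied_by_existing_pin

    existing_pin = install_req_from_line("Flask==0.10.1")

    print(ireq_satisfied_by_existing_pin(
        install_req_from_line("Flask>=0.10"), existing_pin))  # True

    print(ireq_satisfied_by_existing_pin(
        install_req_from_line("Flask>=1.0"), existing_pin))   # False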
diff --git a/third_party/python/pip_tools/piptools/repositories/pypi.py b/third_party/python/pip_tools/piptools/repositories/pypi.py
new file mode 100644
index 0000000000..7a988bfc1f
--- /dev/null
+++ b/third_party/python/pip_tools/piptools/repositories/pypi.py
@@ -0,0 +1,531 @@
+# coding: utf-8
+from __future__ import absolute_import, division, print_function, unicode_literals
+
+import collections
+import hashlib
+import itertools
+import logging
+import os
+from contextlib import contextmanager
+from shutil import rmtree
+
+from pip._internal.cache import WheelCache
+from pip._internal.cli.progress_bars import BAR_TYPES
+from pip._internal.commands import create_command
+from pip._internal.models.index import PackageIndex, PyPI
+from pip._internal.models.link import Link
+from pip._internal.models.wheel import Wheel
+from pip._internal.req import RequirementSet
+from pip._internal.req.req_tracker import get_requirement_tracker
+from pip._internal.utils.hashes import FAVORITE_HASH
+from pip._internal.utils.logging import indent_log, setup_logging
+from pip._internal.utils.misc import normalize_path
+from pip._internal.utils.temp_dir import TempDirectory, global_tempdir_manager
+from pip._internal.utils.urls import path_to_url, url_to_path
+from pip._vendor.requests import RequestException
+
+from .._compat import PIP_VERSION, TemporaryDirectory, contextlib, makedirs
+from ..click import progressbar
+from ..exceptions import NoCandidateFound
+from ..logging import log
+from ..utils import (
+ as_tuple,
+ fs_str,
+ is_pinned_requirement,
+ is_url_requirement,
+ lookup_table,
+ make_install_requirement,
+)
+from .base import BaseRepository
+
+FILE_CHUNK_SIZE = 4096
+FileStream = collections.namedtuple("FileStream", "stream size")
+
+
+class PyPIRepository(BaseRepository):
+ DEFAULT_INDEX_URL = PyPI.simple_url
+ HASHABLE_PACKAGE_TYPES = {"bdist_wheel", "sdist"}
+
+ """
+ The PyPIRepository will use the provided Finder instance to lookup
+ packages. Typically, it looks up packages on PyPI (the default implicit
+ config), but any other PyPI mirror can be used if index_urls is
+ changed/configured on the Finder.
+ """
+
+ def __init__(self, pip_args, cache_dir):
+ # Use pip's parser for pip.conf management and defaults.
+ # General options (find_links, index_url, extra_index_url, trusted_host,
+ # and pre) are deferred to pip.
+ self.command = create_command("install")
+ extra_pip_args = (
+ []
+ if PIP_VERSION[:2] <= (20, 2)
+ else ["--use-deprecated", "legacy-resolver"]
+ )
+ self.options, _ = self.command.parse_args(pip_args + extra_pip_args)
+ if self.options.cache_dir:
+ self.options.cache_dir = normalize_path(self.options.cache_dir)
+
+ self.options.require_hashes = False
+ self.options.ignore_dependencies = False
+
+ self.session = self.command._build_session(self.options)
+ self.finder = self.command._build_package_finder(
+ options=self.options, session=self.session
+ )
+
+ # Caches
+ # stores project_name => InstallationCandidate mappings for all
+ # versions reported by PyPI, so we only have to ask once for each
+ # project
+ self._available_candidates_cache = {}
+
+ # stores InstallRequirement => list(InstallRequirement) mappings
+ # of all secondary dependencies for the given requirement, so we
+ # only have to go to disk once for each requirement
+ self._dependencies_cache = {}
+
+ # Setup file paths
+ self._build_dir = None
+ self._source_dir = None
+ self._cache_dir = normalize_path(cache_dir)
+ self._download_dir = fs_str(os.path.join(self._cache_dir, "pkgs"))
+ if PIP_VERSION[:2] <= (20, 2):
+ self._wheel_download_dir = fs_str(os.path.join(self._cache_dir, "wheels"))
+
+ self._setup_logging()
+
+ @contextmanager
+ def freshen_build_caches(self):
+ """
+ Start with fresh build/source caches. Will remove any old build
+ caches from disk automatically.
+ """
+ self._build_dir = TemporaryDirectory(fs_str("build"))
+ self._source_dir = TemporaryDirectory(fs_str("source"))
+ try:
+ yield
+ finally:
+ self._build_dir.cleanup()
+ self._build_dir = None
+ self._source_dir.cleanup()
+ self._source_dir = None
+
+ @property
+ def build_dir(self):
+ return self._build_dir.name if self._build_dir else None
+
+ @property
+ def source_dir(self):
+ return self._source_dir.name if self._source_dir else None
+
+ def clear_caches(self):
+ rmtree(self._download_dir, ignore_errors=True)
+ if PIP_VERSION[:2] <= (20, 2):
+ rmtree(self._wheel_download_dir, ignore_errors=True)
+
+ def find_all_candidates(self, req_name):
+ if req_name not in self._available_candidates_cache:
+ candidates = self.finder.find_all_candidates(req_name)
+ self._available_candidates_cache[req_name] = candidates
+ return self._available_candidates_cache[req_name]
+
+ def find_best_match(self, ireq, prereleases=None):
+ """
+ Returns a Version object that indicates the best match for the given
+ InstallRequirement according to the external repository.
+ """
+ if ireq.editable or is_url_requirement(ireq):
+ return ireq # return itself as the best match
+
+ all_candidates = self.find_all_candidates(ireq.name)
+ candidates_by_version = lookup_table(all_candidates, key=lambda c: c.version)
+ matching_versions = ireq.specifier.filter(
+ (candidate.version for candidate in all_candidates), prereleases=prereleases
+ )
+
+ matching_candidates = list(
+ itertools.chain.from_iterable(
+ candidates_by_version[ver] for ver in matching_versions
+ )
+ )
+ if not matching_candidates:
+ raise NoCandidateFound(ireq, all_candidates, self.finder)
+
+ evaluator = self.finder.make_candidate_evaluator(ireq.name)
+ best_candidate_result = evaluator.compute_best_candidate(matching_candidates)
+ best_candidate = best_candidate_result.best_candidate
+
+ # Turn the candidate into a pinned InstallRequirement
+ return make_install_requirement(
+ best_candidate.name,
+ best_candidate.version,
+ ireq.extras,
+ constraint=ireq.constraint,
+ )
+
+ def resolve_reqs(self, download_dir, ireq, wheel_cache):
+ with get_requirement_tracker() as req_tracker, TempDirectory(
+ kind="resolver"
+ ) as temp_dir, indent_log():
+ preparer_kwargs = dict(
+ temp_build_dir=temp_dir,
+ options=self.options,
+ req_tracker=req_tracker,
+ session=self.session,
+ finder=self.finder,
+ use_user_site=False,
+ download_dir=download_dir,
+ )
+ if PIP_VERSION[:2] <= (20, 2):
+ preparer_kwargs["wheel_download_dir"] = self._wheel_download_dir
+ preparer = self.command.make_requirement_preparer(**preparer_kwargs)
+
+ reqset = RequirementSet()
+ if PIP_VERSION[:2] <= (20, 1):
+ ireq.is_direct = True
+ else:
+ ireq.user_supplied = True
+ reqset.add_requirement(ireq)
+
+ resolver = self.command.make_resolver(
+ preparer=preparer,
+ finder=self.finder,
+ options=self.options,
+ wheel_cache=wheel_cache,
+ use_user_site=False,
+ ignore_installed=True,
+ ignore_requires_python=False,
+ force_reinstall=False,
+ upgrade_strategy="to-satisfy-only",
+ )
+ results = resolver._resolve_one(reqset, ireq)
+ if not ireq.prepared:
+ # If still not prepared, e.g. a constraint, do enough to assign
+ # the ireq a name:
+ if PIP_VERSION[:2] <= (20, 2):
+ resolver._get_abstract_dist_for(ireq)
+ else:
+ resolver._get_dist_for(ireq)
+
+ return set(results)
+
+ def get_dependencies(self, ireq):
+ """
+ Given a pinned, URL, or editable InstallRequirement, returns a set of
+ dependencies (also InstallRequirements, but not necessarily pinned).
+ They indicate the secondary dependencies for the given requirement.
+ """
+ if not (
+ ireq.editable or is_url_requirement(ireq) or is_pinned_requirement(ireq)
+ ):
+ raise TypeError(
+ "Expected url, pinned or editable InstallRequirement, got {}".format(
+ ireq
+ )
+ )
+
+ if ireq not in self._dependencies_cache:
+ if ireq.editable and (ireq.source_dir and os.path.exists(ireq.source_dir)):
+ # No download_dir for locally available editable requirements.
+ # If a download_dir is passed, pip will unnecessarily archive
+ # the entire source directory
+ download_dir = None
+ elif ireq.link and ireq.link.is_vcs:
+ # No download_dir for VCS sources. This also works around pip
+ # using git-checkout-index, which gets rid of the .git dir.
+ download_dir = None
+ else:
+ download_dir = self._get_download_path(ireq)
+ makedirs(download_dir, exist_ok=True)
+ if PIP_VERSION[:2] <= (20, 2):
+ makedirs(self._wheel_download_dir, exist_ok=True)
+
+ with global_tempdir_manager():
+ wheel_cache = WheelCache(self._cache_dir, self.options.format_control)
+ self._dependencies_cache[ireq] = self.resolve_reqs(
+ download_dir, ireq, wheel_cache
+ )
+
+ return self._dependencies_cache[ireq]
+
+ def copy_ireq_dependencies(self, source, dest):
+ try:
+ self._dependencies_cache[dest] = self._dependencies_cache[source]
+ except KeyError:
+ # `source` may not be in cache yet.
+ pass
+
+ def _get_project(self, ireq):
+ """
+ Return a dict of project info from the PyPI JSON API for a given
+ InstallRequirement. Return None on an HTTP/JSON error or if the
+ package is not found on the PyPI server.
+
+ API reference: https://warehouse.readthedocs.io/api-reference/json/
+ """
+ package_indexes = (
+ PackageIndex(url=index_url, file_storage_domain="")
+ for index_url in self.finder.search_scope.index_urls
+ )
+ for package_index in package_indexes:
+ url = "{url}/{name}/json".format(url=package_index.pypi_url, name=ireq.name)
+ try:
+ response = self.session.get(url)
+ except RequestException as e:
+ log.debug(
+ "Fetch package info from PyPI failed: {url}: {e}".format(
+ url=url, e=e
+ )
+ )
+ continue
+
+ # Skip this PyPI server, because there is no package
+ # or the JSON API might not be supported
+ if response.status_code == 404:
+ continue
+
+ try:
+ data = response.json()
+ except ValueError as e:
+ log.debug(
+ "Cannot parse JSON response from PyPI: {url}: {e}".format(
+ url=url, e=e
+ )
+ )
+ continue
+ return data
+ return None
+
+ def _get_download_path(self, ireq):
+ """
+ Determine the download dir location in a way which avoids name
+ collisions.
+ """
+ if ireq.link:
+ salt = hashlib.sha224(ireq.link.url_without_fragment.encode()).hexdigest()
+ # Nest directories to avoid running out of top level dirs on some FS
+ # (see pypi _get_cache_path_parts, which inspired this)
+ salt = [salt[:2], salt[2:4], salt[4:6], salt[6:]]
+ return os.path.join(self._download_dir, *salt)
+ else:
+ return self._download_dir
+
+ def get_hashes(self, ireq):
+ """
+ Given an InstallRequirement, return a set of hashes that represent all
+ of the files for a given requirement. Unhashable requirements return an
+ empty set. Unpinned requirements raise a TypeError.
+ """
+
+ if ireq.link:
+ link = ireq.link
+
+ if link.is_vcs or (link.is_file and link.is_existing_dir()):
+ # Return empty set for unhashable requirements.
+ # Unhashable logic modeled on pip's
+ # RequirementPreparer.prepare_linked_requirement
+ return set()
+
+ if is_url_requirement(ireq):
+ # Directly hash URL requirements.
+ # URL requirements may have been previously downloaded and cached
+ # locally by self.resolve_reqs()
+ cached_path = os.path.join(self._get_download_path(ireq), link.filename)
+ if os.path.exists(cached_path):
+ cached_link = Link(path_to_url(cached_path))
+ else:
+ cached_link = link
+ return {self._get_file_hash(cached_link)}
+
+ if not is_pinned_requirement(ireq):
+ raise TypeError("Expected pinned requirement, got {}".format(ireq))
+
+ log.debug(ireq.name)
+
+ with log.indentation():
+ hashes = self._get_hashes_from_pypi(ireq)
+ if hashes is None:
+ log.log("Couldn't get hashes from PyPI, fallback to hashing files")
+ return self._get_hashes_from_files(ireq)
+
+ return hashes
+
+ def _get_hashes_from_pypi(self, ireq):
+ """
+ Return a set of hashes from the PyPI JSON API for a given InstallRequirement.
+ Return None if fetching the data failed or the digests are missing.
+ """
+ project = self._get_project(ireq)
+ if project is None:
+ return None
+
+ _, version, _ = as_tuple(ireq)
+
+ try:
+ release_files = project["releases"][version]
+ except KeyError:
+ log.debug("Missing release files on PyPI")
+ return None
+
+ try:
+ hashes = {
+ "{algo}:{digest}".format(
+ algo=FAVORITE_HASH, digest=file_["digests"][FAVORITE_HASH]
+ )
+ for file_ in release_files
+ if file_["packagetype"] in self.HASHABLE_PACKAGE_TYPES
+ }
+ except KeyError:
+ log.debug("Missing digests of release files on PyPI")
+ return None
+
+ return hashes
+
+ def _get_hashes_from_files(self, ireq):
+ """
+ Return a set of hashes for all release files of a given InstallRequirement.
+ """
+ # We need to get all of the candidates that match our current version
+ # pin, these will represent all of the files that could possibly
+ # satisfy this constraint.
+ all_candidates = self.find_all_candidates(ireq.name)
+ candidates_by_version = lookup_table(all_candidates, key=lambda c: c.version)
+ matching_versions = list(
+ ireq.specifier.filter((candidate.version for candidate in all_candidates))
+ )
+ matching_candidates = candidates_by_version[matching_versions[0]]
+
+ return {
+ self._get_file_hash(candidate.link) for candidate in matching_candidates
+ }
+
+ def _get_file_hash(self, link):
+ log.debug("Hashing {}".format(link.show_url))
+ h = hashlib.new(FAVORITE_HASH)
+ with open_local_or_remote_file(link, self.session) as f:
+ # Chunks to iterate
+ chunks = iter(lambda: f.stream.read(FILE_CHUNK_SIZE), b"")
+
+ # Choose a context manager depending on verbosity
+ if log.verbosity >= 1:
+ iter_length = f.size / FILE_CHUNK_SIZE if f.size else None
+ bar_template = "{prefix} |%(bar)s| %(info)s".format(
+ prefix=" " * log.current_indent
+ )
+ context_manager = progressbar(
+ chunks,
+ length=iter_length,
+ # Make it look like default pip progress bar
+ fill_char="â–ˆ",
+ empty_char=" ",
+ bar_template=bar_template,
+ width=32,
+ )
+ else:
+ context_manager = contextlib.nullcontext(chunks)
+
+ # Iterate over the chosen context manager
+ with context_manager as bar:
+ for chunk in bar:
+ h.update(chunk)
+ return ":".join([FAVORITE_HASH, h.hexdigest()])
+
+ @contextmanager
+ def allow_all_wheels(self):
+ """
+ Monkey patches pip.Wheel to allow wheels from all platforms and Python versions.
+
+ This also saves the candidate cache and sets a new one, or else the
+ results from the previous non-patched calls will interfere.
+ """
+
+ def _wheel_supported(self, tags=None):
+ # Ignore current platform. Support everything.
+ return True
+
+ def _wheel_support_index_min(self, tags=None):
+ # All wheels are equal priority for sorting.
+ return 0
+
+ original_wheel_supported = Wheel.supported
+ original_support_index_min = Wheel.support_index_min
+ original_cache = self._available_candidates_cache
+
+ Wheel.supported = _wheel_supported
+ Wheel.support_index_min = _wheel_support_index_min
+ self._available_candidates_cache = {}
+
+ try:
+ yield
+ finally:
+ Wheel.supported = original_wheel_supported
+ Wheel.support_index_min = original_support_index_min
+ self._available_candidates_cache = original_cache
+
+ def _setup_logging(self):
+ """
+ Set up pip's logger. Ensure pip's verbosity matches pip-tools' and
+ sync pip's log stream with LogContext.stream.
+ """
+ # Pip's default logger is noisy, so decrease its verbosity
+ setup_logging(
+ verbosity=log.verbosity - 1,
+ no_color=self.options.no_color,
+ user_log_file=self.options.log,
+ )
+
+ # Sync pip's console handler stream with LogContext.stream
+ logger = logging.getLogger()
+ for handler in logger.handlers:
+ if handler.name == "console": # pragma: no branch
+ handler.stream = log.stream
+ break
+ else: # pragma: no cover
+ # There is always a console handler. This warning would be a signal
+ # that this block should be removed/revisited, because pip may have
+ # refactored out its logging config.
+ log.warning("Couldn't find a 'console' logging handler")
+
+ # Sync pip's progress bars stream with LogContext.stream
+ for bar_cls in itertools.chain(*BAR_TYPES.values()):
+ bar_cls.file = log.stream
+
+
+@contextmanager
+def open_local_or_remote_file(link, session):
+ """
+ Open local or remote file for reading.
+
+ :type link: pip.index.Link
+ :type session: requests.Session
+ :raises ValueError: If link points to a local directory.
+ :return: a context manager to a FileStream with the opened file-like object
+ """
+ url = link.url_without_fragment
+
+ if link.is_file:
+ # Local URL
+ local_path = url_to_path(url)
+ if os.path.isdir(local_path):
+ raise ValueError("Cannot open directory for read: {}".format(url))
+ else:
+ st = os.stat(local_path)
+ with open(local_path, "rb") as local_file:
+ yield FileStream(stream=local_file, size=st.st_size)
+ else:
+ # Remote URL
+ headers = {"Accept-Encoding": "identity"}
+ response = session.get(url, headers=headers, stream=True)
+
+ # Content length must be int or None
+ try:
+ content_length = int(response.headers["content-length"])
+ except (ValueError, KeyError, TypeError):
+ content_length = None
+
+ try:
+ yield FileStream(stream=response.raw, size=content_length)
+ finally:
+ response.close()
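The nested-salt scheme from _get_download_path above, isolated as a runnable sketch (the URL is a hypothetical example): the sha224 hex digest of the link is split into 2/2/2/rest path segments, the same shape as pip's own cache-path parts, so one flat directory never accumulates thousands of entries.

    import hashlib
    import os

    url = "https://files.example.org/packages/demo-1.0.tar.gz"  # hypothetical
    salt = hashlib.sha224(url.encode()).hexdigest()             # 56 hex chars
    parts = [salt[:2], salt[2:4], salt[4:6], salt[6:]]
    print(os.path.join("pkgs", *parts))
    # e.g. pkgs/ab/cd/ef/<remaining 50 hex characters>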
diff --git a/third_party/python/pip_tools/piptools/resolver.py b/third_party/python/pip_tools/piptools/resolver.py
new file mode 100644
index 0000000000..d46a04a9e3
--- /dev/null
+++ b/third_party/python/pip_tools/piptools/resolver.py
@@ -0,0 +1,405 @@
+# coding: utf-8
+from __future__ import absolute_import, division, print_function, unicode_literals
+
+import copy
+from functools import partial
+from itertools import chain, count, groupby
+
+from pip._internal.req.constructors import install_req_from_line
+from pip._internal.req.req_tracker import update_env_context_manager
+
+from . import click
+from .logging import log
+from .utils import (
+ UNSAFE_PACKAGES,
+ format_requirement,
+ format_specifier,
+ is_pinned_requirement,
+ is_url_requirement,
+ key_from_ireq,
+)
+
+green = partial(click.style, fg="green")
+magenta = partial(click.style, fg="magenta")
+
+
+class RequirementSummary(object):
+ """
+ Summary of a requirement's properties for comparison purposes.
+ """
+
+ def __init__(self, ireq):
+ self.req = ireq.req
+ self.key = key_from_ireq(ireq)
+ self.extras = frozenset(ireq.extras)
+ self.specifier = ireq.specifier
+
+ def __eq__(self, other):
+ return (
+ self.key == other.key
+ and self.specifier == other.specifier
+ and self.extras == other.extras
+ )
+
+ def __hash__(self):
+ return hash((self.key, self.specifier, self.extras))
+
+ def __str__(self):
+ return repr((self.key, str(self.specifier), sorted(self.extras)))
+
+
+def combine_install_requirements(repository, ireqs):
+ """
+ Return a single install requirement that reflects a combination of
+ all the inputs.
+ """
+ # We will store the source ireqs in a _source_ireqs attribute;
+ # if any of the inputs have this, then use those sources directly.
+ source_ireqs = []
+ for ireq in ireqs:
+ source_ireqs.extend(getattr(ireq, "_source_ireqs", [ireq]))
+
+ # Optimization. Don't bother with combination logic.
+ if len(source_ireqs) == 1:
+ return source_ireqs[0]
+
+ # deepcopy the accumulator so as to not modify the inputs
+ combined_ireq = copy.deepcopy(source_ireqs[0])
+ repository.copy_ireq_dependencies(source_ireqs[0], combined_ireq)
+
+ for ireq in source_ireqs[1:]:
+ # NOTE we may be losing some info on dropped reqs here
+ combined_ireq.req.specifier &= ireq.req.specifier
+ if combined_ireq.constraint:
+ # We don't find dependencies for constraint ireqs, so copy them
+ # from non-constraints:
+ repository.copy_ireq_dependencies(ireq, combined_ireq)
+ combined_ireq.constraint &= ireq.constraint
+ # Return a sorted, de-duped tuple of extras
+ combined_ireq.extras = tuple(
+ sorted(set(tuple(combined_ireq.extras) + tuple(ireq.extras)))
+ )
+
+ # InstallRequirements objects are assumed to come from only one source, and
+ # so they support only a single comes_from entry. This function breaks this
+ # model. As a workaround, we deterministically choose a single source for
+ # the comes_from entry, and add an extra _source_ireqs attribute to keep
+ # track of multiple sources for use within pip-tools.
+ if len(source_ireqs) > 1:
+ if any(ireq.comes_from is None for ireq in source_ireqs):
+ # None indicates package was directly specified.
+ combined_ireq.comes_from = None
+ else:
+ # Populate the comes_from field from one of the sources.
+ # Requirement input order is not stable, so we need to sort:
+ # We choose the shortest entry in order to keep the printed
+ # representation as concise as possible.
+ combined_ireq.comes_from = min(
+ (ireq.comes_from for ireq in source_ireqs),
+ key=lambda x: (len(str(x)), str(x)),
+ )
+ combined_ireq._source_ireqs = source_ireqs
+ return combined_ireq
+
+
+class Resolver(object):
+ def __init__(
+ self,
+ constraints,
+ repository,
+ cache,
+ prereleases=False,
+ clear_caches=False,
+ allow_unsafe=False,
+ ):
+ """
+ This class resolves a given set of constraints (a collection of
+ InstallRequirement objects) by consulting the given Repository and the
+ DependencyCache.
+ """
+ self.our_constraints = set(constraints)
+ self.their_constraints = set()
+ self.repository = repository
+ self.dependency_cache = cache
+ self.prereleases = prereleases
+ self.clear_caches = clear_caches
+ self.allow_unsafe = allow_unsafe
+ self.unsafe_constraints = set()
+
+ @property
+ def constraints(self):
+ return set(
+ self._group_constraints(chain(self.our_constraints, self.their_constraints))
+ )
+
+ def resolve_hashes(self, ireqs):
+ """
+ Finds acceptable hashes for all of the given InstallRequirements.
+ """
+ log.debug("")
+ log.debug("Generating hashes:")
+ with self.repository.allow_all_wheels(), log.indentation():
+ return {ireq: self.repository.get_hashes(ireq) for ireq in ireqs}
+
+ def resolve(self, max_rounds=10):
+ """
+ Finds concrete package versions for all the given InstallRequirements
+ and their recursive dependencies. The end result is a flat list of
+ (name, version) tuples. (Or an editable package.)
+
+ Resolves constraints one round at a time, until they don't change
+ anymore. Protects against infinite loops by breaking out after a
+ maximum number of rounds.
+ """
+ if self.clear_caches:
+ self.dependency_cache.clear()
+ self.repository.clear_caches()
+
+ # Ignore existing packages
+ # NOTE: str() wrapping necessary for Python 2/3 compat
+ with update_env_context_manager(PIP_EXISTS_ACTION=str("i")):
+ for current_round in count(start=1): # pragma: no branch
+ if current_round > max_rounds:
+ raise RuntimeError(
+ "No stable configuration of concrete packages "
+ "could be found for the given constraints after "
+ "{max_rounds} rounds of resolving.\n"
+ "This is likely a bug.".format(max_rounds=max_rounds)
+ )
+
+ log.debug("")
+ log.debug(magenta("{:^60}".format("ROUND {}".format(current_round))))
+ # If a package version (foo==2.0) was built in a previous round,
+ # and in this round a different version of foo needs to be built
+ # (i.e. foo==1.0), the directory will exist already, which will
+ # cause a pip build failure. The trick is to start with a new
+ # build cache dir for every round, so this can never happen.
+ with self.repository.freshen_build_caches():
+ has_changed, best_matches = self._resolve_one_round()
+ log.debug("-" * 60)
+ log.debug(
+ "Result of round {}: {}".format(
+ current_round,
+ "not stable" if has_changed else "stable, done",
+ )
+ )
+ if not has_changed:
+ break
+
+ # Only include hard requirements and not pip constraints
+ results = {req for req in best_matches if not req.constraint}
+
+ # Filter out unsafe requirements.
+ self.unsafe_constraints = set()
+ if not self.allow_unsafe:
+ # reverse_dependencies is used to filter out packages that are only
+ # required by unsafe packages. This logic is incomplete, as it would
+ # fail to filter sub-sub-dependencies of unsafe packages. None of the
+ # UNSAFE_PACKAGES currently have any dependencies at all (which makes
+ # sense for installation tools) so this seems sufficient.
+ reverse_dependencies = self.reverse_dependencies(results)
+ for req in results.copy():
+ required_by = reverse_dependencies.get(req.name.lower(), [])
+ if req.name in UNSAFE_PACKAGES or (
+ required_by and all(name in UNSAFE_PACKAGES for name in required_by)
+ ):
+ self.unsafe_constraints.add(req)
+ results.remove(req)
+
+ return results
+
+ def _group_constraints(self, constraints):
+ """
+ Groups constraints (remember, InstallRequirements!) by their key name,
+ combining their SpecifierSets into a single InstallRequirement per
+ package. For example, given the following constraints:
+
+ Django<1.9,>=1.4.2
+ django~=1.5
+ Flask~=0.7
+
+ This will be combined into a single entry per package:
+
+ django~=1.5,<1.9,>=1.4.2
+ flask~=0.7
+
+ """
+ constraints = list(constraints)
+ for ireq in constraints:
+ if ireq.name is None:
+ # get_dependencies has the side effect of assigning a name to the ireq
+ # (so we can group by the name below).
+ self.repository.get_dependencies(ireq)
+
+ # Sort first by name, i.e. the groupby key. Then within each group,
+ # sort editables first.
+ # This way, we don't bother with combining editables, since the first
+ # ireq will be editable, if one exists.
+ for _, ireqs in groupby(
+ sorted(constraints, key=(lambda x: (key_from_ireq(x), not x.editable))),
+ key=key_from_ireq,
+ ):
+ yield combine_install_requirements(self.repository, ireqs)
+
+ def _resolve_one_round(self):
+ """
+ Resolves one level of the current constraints, by finding the best
+ match for each package in the repository and adding all requirements
+ for those best package versions. Some of these constraints may be new
+ or updated.
+
+ Returns whether new constraints appeared in this round. If no
+ constraints were added or changed, this indicates a stable
+ configuration.
+ """
+ # Sort this list for readability of terminal output
+ constraints = sorted(self.constraints, key=key_from_ireq)
+
+ log.debug("Current constraints:")
+ with log.indentation():
+ for constraint in constraints:
+ log.debug(str(constraint))
+
+ log.debug("")
+ log.debug("Finding the best candidates:")
+ with log.indentation():
+ best_matches = {self.get_best_match(ireq) for ireq in constraints}
+
+ # Find the new set of secondary dependencies
+ log.debug("")
+ log.debug("Finding secondary dependencies:")
+
+ their_constraints = []
+ with log.indentation():
+ for best_match in best_matches:
+ their_constraints.extend(self._iter_dependencies(best_match))
+ # Grouping constraints to make clean diff between rounds
+ theirs = set(self._group_constraints(their_constraints))
+
+ # NOTE: We need to compare RequirementSummary objects, since
+ # InstallRequirement does not define equality
+ diff = {RequirementSummary(t) for t in theirs} - {
+ RequirementSummary(t) for t in self.their_constraints
+ }
+ removed = {RequirementSummary(t) for t in self.their_constraints} - {
+ RequirementSummary(t) for t in theirs
+ }
+
+ has_changed = len(diff) > 0 or len(removed) > 0
+ if has_changed:
+ log.debug("")
+ log.debug("New dependencies found in this round:")
+ with log.indentation():
+ for new_dependency in sorted(diff, key=key_from_ireq):
+ log.debug("adding {}".format(new_dependency))
+ log.debug("Removed dependencies in this round:")
+ with log.indentation():
+ for removed_dependency in sorted(removed, key=key_from_ireq):
+ log.debug("removing {}".format(removed_dependency))
+
+ # Store the last round's results in the their_constraints
+ self.their_constraints = theirs
+ return has_changed, best_matches
+
+ def get_best_match(self, ireq):
+ """
+ Returns the best match to use for the given InstallRequirement,
+ itself in the form of a (pinned or editable)
+ InstallRequirement.
+
+ Example:
+ Given the constraint Flask>=0.10, may return Flask==0.10.1 at
+ a certain moment in time.
+
+ Pinned requirements will always return themselves, i.e.
+
+ Flask==0.10.1 => Flask==0.10.1
+
+ """
+ if ireq.editable or is_url_requirement(ireq):
+ # NOTE: it's much quicker to immediately return instead of
+ # hitting the index server
+ best_match = ireq
+ elif is_pinned_requirement(ireq):
+ # NOTE: it's much quicker to immediately return instead of
+ # hitting the index server
+ best_match = ireq
+ elif ireq.constraint:
+ # NOTE: This is not a requirement (yet) and does not need
+ # to be resolved
+ best_match = ireq
+ else:
+ best_match = self.repository.find_best_match(
+ ireq, prereleases=self.prereleases
+ )
+
+ # Format the best match
+ log.debug(
+ "found candidate {} (constraint was {})".format(
+ format_requirement(best_match), format_specifier(ireq)
+ )
+ )
+ best_match.comes_from = ireq.comes_from
+ if hasattr(ireq, "_source_ireqs"):
+ best_match._source_ireqs = ireq._source_ireqs
+ return best_match
+
+ def _iter_dependencies(self, ireq):
+ """
+ Given a pinned, URL, or editable InstallRequirement, collects all of
+ its secondary dependencies, either by looking them up in a local
+ cache, or by reaching out to the repository.
+
+ Editable requirements will never be looked up, as they may have
+ changed at any time.
+ """
+ # Pip does not resolve dependencies of constraints. We skip handling
+ # constraints here as well to prevent the cache from being polluted.
+ # Constraints that are later determined to be dependencies will be
+ # marked as non-constraints in later rounds by
+ # `combine_install_requirements`, and will be properly resolved.
+ # See https://github.com/pypa/pip/
+ # blob/6896dfcd831330c13e076a74624d95fa55ff53f4/src/pip/_internal/
+ # legacy_resolve.py#L325
+ if ireq.constraint:
+ return
+
+ if ireq.editable or is_url_requirement(ireq):
+ for dependency in self.repository.get_dependencies(ireq):
+ yield dependency
+ return
+ elif not is_pinned_requirement(ireq):
+ raise TypeError(
+ "Expected pinned or editable requirement, got {}".format(ireq)
+ )
+
+ # Now, either get the dependencies from the dependency cache (for
+ # speed), or reach out to the external repository to
+ # download and inspect the package version and get dependencies
+ # from there
+ if ireq not in self.dependency_cache:
+ log.debug(
+ "{} not in cache, need to check index".format(format_requirement(ireq)),
+ fg="yellow",
+ )
+ dependencies = self.repository.get_dependencies(ireq)
+ self.dependency_cache[ireq] = sorted(str(ireq.req) for ireq in dependencies)
+
+ # Example: ['Werkzeug>=0.9', 'Jinja2>=2.4']
+ dependency_strings = self.dependency_cache[ireq]
+ log.debug(
+ "{:25} requires {}".format(
+ format_requirement(ireq),
+ ", ".join(sorted(dependency_strings, key=lambda s: s.lower())) or "-",
+ )
+ )
+ for dependency_string in dependency_strings:
+ yield install_req_from_line(
+ dependency_string, constraint=ireq.constraint, comes_from=ireq
+ )
+
+ def reverse_dependencies(self, ireqs):
+ non_editable = [
+ ireq for ireq in ireqs if not (ireq.editable or is_url_requirement(ireq))
+ ]
+ return self.dependency_cache.reverse_dependencies(non_editable)
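The specifier-merging step inside combine_install_requirements, isolated with pip's vendored packaging (the constraints are the ones from _group_constraints' docstring): the in-place &= on SpecifierSets is what folds several ireqs for one package into a single combined requirement.

    from pip._vendor.packaging.specifiers import SpecifierSet

    combined = SpecifierSet(">=1.4.2,<1.9")   # Django<1.9,>=1.4.2
    combined &= SpecifierSet("~=1.5")         # django~=1.5

    print(str(combined))               # e.g. "<1.9,>=1.4.2,~=1.5"
    print(combined.contains("1.8.7"))  # True: satisfies all three clauses
    print(combined.contains("1.9"))    # False: fails <1.9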
diff --git a/third_party/python/pip_tools/piptools/scripts/__init__.py b/third_party/python/pip_tools/piptools/scripts/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/third_party/python/pip_tools/piptools/scripts/__init__.py
diff --git a/third_party/python/pip_tools/piptools/scripts/compile.py b/third_party/python/pip_tools/piptools/scripts/compile.py
new file mode 100644
index 0000000000..ca650e4913
--- /dev/null
+++ b/third_party/python/pip_tools/piptools/scripts/compile.py
@@ -0,0 +1,495 @@
+# coding: utf-8
+from __future__ import absolute_import, division, print_function, unicode_literals
+
+import os
+import shlex
+import sys
+import tempfile
+import warnings
+
+from click import Command
+from click.utils import safecall
+from pip._internal.commands import create_command
+from pip._internal.req.constructors import install_req_from_line
+from pip._internal.utils.misc import redact_auth_from_url
+
+from .. import click
+from .._compat import parse_requirements
+from ..cache import DependencyCache
+from ..exceptions import PipToolsError
+from ..locations import CACHE_DIR
+from ..logging import log
+from ..repositories import LocalRequirementsRepository, PyPIRepository
+from ..resolver import Resolver
+from ..utils import UNSAFE_PACKAGES, dedup, is_pinned_requirement, key_from_ireq
+from ..writer import OutputWriter
+
+DEFAULT_REQUIREMENTS_FILE = "requirements.in"
+DEFAULT_REQUIREMENTS_OUTPUT_FILE = "requirements.txt"
+
+
+def _get_default_option(option_name):
+ """
+ Get default value of the pip's option (including option from pip.conf)
+ by a given option name.
+ """
+ install_command = create_command("install")
+ default_values = install_command.parser.get_default_values()
+ return getattr(default_values, option_name)
+
+
+class BaseCommand(Command):
+ _os_args = None
+
+ def parse_args(self, ctx, args):
+ """
+ Override base `parse_args` to store the argument part of `sys.argv`.
+ """
+ self._os_args = set(args)
+ return super(BaseCommand, self).parse_args(ctx, args)
+
+ def has_arg(self, arg_name):
+ """
+ Detect whether a given arg name (including negative counterparts
+ to the arg, e.g. --no-arg) is present in the argument part of `sys.argv`.
+ """
+ command_options = {option.name: option for option in self.params}
+ option = command_options[arg_name]
+ args = set(option.opts + option.secondary_opts)
+ return bool(self._os_args & args)
+
+
+@click.command(
+ cls=BaseCommand, context_settings={"help_option_names": ("-h", "--help")}
+)
+@click.version_option()
+@click.pass_context
+@click.option("-v", "--verbose", count=True, help="Show more output")
+@click.option("-q", "--quiet", count=True, help="Give less output")
+@click.option(
+ "-n",
+ "--dry-run",
+ is_flag=True,
+ help="Only show what would happen, don't change anything",
+)
+@click.option(
+ "-p",
+ "--pre",
+ is_flag=True,
+ default=None,
+ help="Allow resolving to prereleases (default is not)",
+)
+@click.option(
+ "-r",
+ "--rebuild",
+ is_flag=True,
+ help="Clear any caches upfront, rebuild from scratch",
+)
+@click.option(
+ "-f",
+ "--find-links",
+ multiple=True,
+ help="Look for archives in this directory or on this HTML page",
+)
+@click.option(
+ "-i",
+ "--index-url",
+ help="Change index URL (defaults to {index_url})".format(
+ index_url=redact_auth_from_url(_get_default_option("index_url"))
+ ),
+)
+@click.option(
+ "--extra-index-url", multiple=True, help="Add additional index URL to search"
+)
+@click.option("--cert", help="Path to alternate CA bundle.")
+@click.option(
+ "--client-cert",
+ help="Path to SSL client certificate, a single file containing "
+ "the private key and the certificate in PEM format.",
+)
+@click.option(
+ "--trusted-host",
+ multiple=True,
+ help="Mark this host as trusted, even though it does not have "
+ "valid or any HTTPS.",
+)
+@click.option(
+ "--header/--no-header",
+ is_flag=True,
+ default=True,
+ help="Add header to generated file",
+)
+@click.option(
+ "--index/--no-index",
+ is_flag=True,
+ default=True,
+ help="DEPRECATED: Add index URL to generated file",
+)
+@click.option(
+ "--emit-trusted-host/--no-emit-trusted-host",
+ is_flag=True,
+ default=True,
+ help="Add trusted host option to generated file",
+)
+@click.option(
+ "--annotate/--no-annotate",
+ is_flag=True,
+ default=True,
+ help="Annotate results, indicating where dependencies come from",
+)
+@click.option(
+ "-U",
+ "--upgrade",
+ is_flag=True,
+ default=False,
+ help="Try to upgrade all dependencies to their latest versions",
+)
+@click.option(
+ "-P",
+ "--upgrade-package",
+ "upgrade_packages",
+ nargs=1,
+ multiple=True,
+ help="Specify particular packages to upgrade.",
+)
+@click.option(
+ "-o",
+ "--output-file",
+ nargs=1,
+ default=None,
+ type=click.File("w+b", atomic=True, lazy=True),
+ help=(
+ "Output file name. Required if more than one input file is given. "
+ "Will be derived from input file otherwise."
+ ),
+)
+@click.option(
+ "--allow-unsafe/--no-allow-unsafe",
+ is_flag=True,
+ default=False,
+ help=(
+ "Pin packages considered unsafe: {}.\n\n"
+ "WARNING: Future versions of pip-tools will enable this behavior by default. "
+ "Use --no-allow-unsafe to keep the old behavior. It is recommended to pass the "
+ "--allow-unsafe now to adapt to the upcoming change.".format(
+ ", ".join(sorted(UNSAFE_PACKAGES))
+ )
+ ),
+)
+@click.option(
+ "--generate-hashes",
+ is_flag=True,
+ default=False,
+ help="Generate pip 8 style hashes in the resulting requirements file.",
+)
+@click.option(
+ "--reuse-hashes/--no-reuse-hashes",
+ is_flag=True,
+ default=True,
+ help=(
+ "Improve the speed of --generate-hashes by reusing the hashes from an "
+ "existing output file."
+ ),
+)
+@click.option(
+ "--max-rounds",
+ default=10,
+ help="Maximum number of rounds before resolving the requirements aborts.",
+)
+@click.argument("src_files", nargs=-1, type=click.Path(exists=True, allow_dash=True))
+@click.option(
+ "--build-isolation/--no-build-isolation",
+ is_flag=True,
+ default=True,
+ help="Enable isolation when building a modern source distribution. "
+ "Build dependencies specified by PEP 518 must be already installed "
+ "if build isolation is disabled.",
+)
+@click.option(
+ "--emit-find-links/--no-emit-find-links",
+ is_flag=True,
+ default=True,
+ help="Add the find-links option to generated file",
+)
+@click.option(
+ "--cache-dir",
+ help="Store the cache data in DIRECTORY.",
+ default=CACHE_DIR,
+ show_default=True,
+ type=click.Path(file_okay=False, writable=True),
+)
+@click.option("--pip-args", help="Arguments to pass directly to the pip command.")
+@click.option(
+ "--emit-index-url/--no-emit-index-url",
+ is_flag=True,
+ default=True,
+ help="Add index URL to generated file",
+)
+def cli(
+ ctx,
+ verbose,
+ quiet,
+ dry_run,
+ pre,
+ rebuild,
+ find_links,
+ index_url,
+ extra_index_url,
+ cert,
+ client_cert,
+ trusted_host,
+ header,
+ index,
+ emit_trusted_host,
+ annotate,
+ upgrade,
+ upgrade_packages,
+ output_file,
+ allow_unsafe,
+ generate_hashes,
+ reuse_hashes,
+ src_files,
+ max_rounds,
+ build_isolation,
+ emit_find_links,
+ cache_dir,
+ pip_args,
+ emit_index_url,
+):
+ """Compiles requirements.txt from requirements.in specs."""
+ log.verbosity = verbose - quiet
+
+ if len(src_files) == 0:
+ if os.path.exists(DEFAULT_REQUIREMENTS_FILE):
+ src_files = (DEFAULT_REQUIREMENTS_FILE,)
+ elif os.path.exists("setup.py"):
+ src_files = ("setup.py",)
+ else:
+ raise click.BadParameter(
+ (
+ "If you do not specify an input file, "
+ "the default is {} or setup.py"
+ ).format(DEFAULT_REQUIREMENTS_FILE)
+ )
+
+ if not output_file:
+ # An output file must be provided for stdin
+ if src_files == ("-",):
+ raise click.BadParameter("--output-file is required if input is from stdin")
+        # Use the default requirements output file if setup.py is the source file
+ elif src_files == ("setup.py",):
+ file_name = DEFAULT_REQUIREMENTS_OUTPUT_FILE
+ # An output file must be provided if there are multiple source files
+ elif len(src_files) > 1:
+ raise click.BadParameter(
+ "--output-file is required if two or more input files are given."
+ )
+ # Otherwise derive the output file from the source file
+ else:
+ base_name = src_files[0].rsplit(".", 1)[0]
+ file_name = base_name + ".txt"
+
+ output_file = click.open_file(file_name, "w+b", atomic=True, lazy=True)
+
+ # Close the file at the end of the context execution
+ ctx.call_on_close(safecall(output_file.close_intelligently))
+
+ if cli.has_arg("index") and cli.has_arg("emit_index_url"):
+ raise click.BadParameter(
+ "--index/--no-index and --emit-index-url/--no-emit-index-url "
+ "are mutually exclusive."
+ )
+ elif cli.has_arg("index"):
+ warnings.warn(
+ "--index and --no-index are deprecated and will be removed "
+ "in future versions. Use --emit-index-url/--no-emit-index-url instead.",
+ category=FutureWarning,
+ )
+ emit_index_url = index
+
+ ###
+ # Setup
+ ###
+
+ right_args = shlex.split(pip_args or "")
+ pip_args = []
+ for link in find_links:
+ pip_args.extend(["-f", link])
+ if index_url:
+ pip_args.extend(["-i", index_url])
+ for extra_index in extra_index_url:
+ pip_args.extend(["--extra-index-url", extra_index])
+ if cert:
+ pip_args.extend(["--cert", cert])
+ if client_cert:
+ pip_args.extend(["--client-cert", client_cert])
+ if pre:
+ pip_args.extend(["--pre"])
+ for host in trusted_host:
+ pip_args.extend(["--trusted-host", host])
+
+ if not build_isolation:
+ pip_args.append("--no-build-isolation")
+ pip_args.extend(right_args)
+
+ repository = PyPIRepository(pip_args, cache_dir=cache_dir)
+
+ # Parse all constraints coming from --upgrade-package/-P
+ upgrade_reqs_gen = (install_req_from_line(pkg) for pkg in upgrade_packages)
+ upgrade_install_reqs = {
+ key_from_ireq(install_req): install_req for install_req in upgrade_reqs_gen
+ }
+
+ existing_pins_to_upgrade = set()
+
+ # Proxy with a LocalRequirementsRepository if --upgrade is not specified
+ # (= default invocation)
+ if not upgrade and os.path.exists(output_file.name):
+        # Use a temporary repository so that outdated (or removed) options
+        # from an existing requirements.txt don't leak into the current
+        # repository.
+ tmp_repository = PyPIRepository(pip_args, cache_dir=cache_dir)
+ ireqs = parse_requirements(
+ output_file.name,
+ finder=tmp_repository.finder,
+ session=tmp_repository.session,
+ options=tmp_repository.options,
+ )
+
+ # Exclude packages from --upgrade-package/-P from the existing
+ # constraints, and separately gather pins to be upgraded
+ existing_pins = {}
+ for ireq in filter(is_pinned_requirement, ireqs):
+ key = key_from_ireq(ireq)
+ if key in upgrade_install_reqs:
+ existing_pins_to_upgrade.add(key)
+ else:
+ existing_pins[key] = ireq
+ repository = LocalRequirementsRepository(
+ existing_pins, repository, reuse_hashes=reuse_hashes
+ )
+
+ ###
+ # Parsing/collecting initial requirements
+ ###
+
+ constraints = []
+ for src_file in src_files:
+ is_setup_file = os.path.basename(src_file) == "setup.py"
+ if is_setup_file or src_file == "-":
+            # pip requires filenames, not file objects. Since we want to
+            # support piping from stdin, we briefly save the stdin input to a
+            # temporary file and have pip read that. The same mechanism is
+            # used for reading requirements from install_requires in setup.py.
+ tmpfile = tempfile.NamedTemporaryFile(mode="wt", delete=False)
+ if is_setup_file:
+ from distutils.core import run_setup
+
+ dist = run_setup(src_file)
+ tmpfile.write("\n".join(dist.install_requires))
+ comes_from = "{name} ({filename})".format(
+ name=dist.get_name(), filename=src_file
+ )
+ else:
+ tmpfile.write(sys.stdin.read())
+ comes_from = "-r -"
+ tmpfile.flush()
+ reqs = list(
+ parse_requirements(
+ tmpfile.name,
+ finder=repository.finder,
+ session=repository.session,
+ options=repository.options,
+ )
+ )
+ for req in reqs:
+ req.comes_from = comes_from
+ constraints.extend(reqs)
+ else:
+ constraints.extend(
+ parse_requirements(
+ src_file,
+ finder=repository.finder,
+ session=repository.session,
+ options=repository.options,
+ )
+ )
+
+ primary_packages = {
+ key_from_ireq(ireq) for ireq in constraints if not ireq.constraint
+ }
+
+ allowed_upgrades = primary_packages | existing_pins_to_upgrade
+ constraints.extend(
+ ireq for key, ireq in upgrade_install_reqs.items() if key in allowed_upgrades
+ )
+
+    # Filter out pip environment markers which do not match (PEP 496)
+ constraints = [
+ req for req in constraints if req.markers is None or req.markers.evaluate()
+ ]
+
+ log.debug("Using indexes:")
+ with log.indentation():
+ for index_url in dedup(repository.finder.index_urls):
+ log.debug(redact_auth_from_url(index_url))
+
+ if repository.finder.find_links:
+ log.debug("")
+ log.debug("Using links:")
+ with log.indentation():
+ for find_link in dedup(repository.finder.find_links):
+ log.debug(redact_auth_from_url(find_link))
+
+ try:
+ resolver = Resolver(
+ constraints,
+ repository,
+ prereleases=repository.finder.allow_all_prereleases or pre,
+ cache=DependencyCache(cache_dir),
+ clear_caches=rebuild,
+ allow_unsafe=allow_unsafe,
+ )
+ results = resolver.resolve(max_rounds=max_rounds)
+ if generate_hashes:
+ hashes = resolver.resolve_hashes(results)
+ else:
+ hashes = None
+ except PipToolsError as e:
+ log.error(str(e))
+ sys.exit(2)
+
+ log.debug("")
+
+ ##
+ # Output
+ ##
+
+ writer = OutputWriter(
+ src_files,
+ output_file,
+ click_ctx=ctx,
+ dry_run=dry_run,
+ emit_header=header,
+ emit_index_url=emit_index_url,
+ emit_trusted_host=emit_trusted_host,
+ annotate=annotate,
+ generate_hashes=generate_hashes,
+ default_index_url=repository.DEFAULT_INDEX_URL,
+ index_urls=repository.finder.index_urls,
+ trusted_hosts=repository.finder.trusted_hosts,
+ format_control=repository.finder.format_control,
+ allow_unsafe=allow_unsafe,
+ find_links=repository.finder.find_links,
+ emit_find_links=emit_find_links,
+ )
+ writer.write(
+ results=results,
+ unsafe_requirements=resolver.unsafe_constraints,
+ markers={
+ key_from_ireq(ireq): ireq.markers for ireq in constraints if ireq.markers
+ },
+ hashes=hashes,
+ )
+
+ if dry_run:
+ log.info("Dry-run, so nothing updated.")
diff --git a/third_party/python/pip_tools/piptools/scripts/sync.py b/third_party/python/pip_tools/piptools/scripts/sync.py
new file mode 100644
index 0000000000..9759b302f0
--- /dev/null
+++ b/third_party/python/pip_tools/piptools/scripts/sync.py
@@ -0,0 +1,214 @@
+# coding: utf-8
+from __future__ import absolute_import, division, print_function, unicode_literals
+
+import itertools
+import os
+import shlex
+import sys
+
+from pip._internal.commands import create_command
+from pip._internal.utils.misc import get_installed_distributions
+
+from .. import click, sync
+from .._compat import parse_requirements
+from ..exceptions import PipToolsError
+from ..logging import log
+from ..repositories import PyPIRepository
+from ..utils import flat_map
+
+DEFAULT_REQUIREMENTS_FILE = "requirements.txt"
+
+
+@click.command(context_settings={"help_option_names": ("-h", "--help")})
+@click.version_option()
+@click.option(
+ "-a",
+ "--ask",
+ is_flag=True,
+ help="Show what would happen, then ask whether to continue",
+)
+@click.option(
+ "-n",
+ "--dry-run",
+ is_flag=True,
+ help="Only show what would happen, don't change anything",
+)
+@click.option("--force", is_flag=True, help="Proceed even if conflicts are found")
+@click.option(
+ "-f",
+ "--find-links",
+ multiple=True,
+ help="Look for archives in this directory or on this HTML page",
+)
+@click.option("-i", "--index-url", help="Change index URL (defaults to PyPI)")
+@click.option(
+ "--extra-index-url", multiple=True, help="Add additional index URL to search"
+)
+@click.option(
+ "--trusted-host",
+ multiple=True,
+ help="Mark this host as trusted, even though it does not have valid or any HTTPS.",
+)
+@click.option(
+ "--no-index",
+ is_flag=True,
+ help="Ignore package index (only looking at --find-links URLs instead)",
+)
+@click.option("-v", "--verbose", count=True, help="Show more output")
+@click.option("-q", "--quiet", count=True, help="Give less output")
+@click.option(
+ "--user", "user_only", is_flag=True, help="Restrict attention to user directory"
+)
+@click.option("--cert", help="Path to alternate CA bundle.")
+@click.option(
+ "--client-cert",
+ help="Path to SSL client certificate, a single file containing "
+ "the private key and the certificate in PEM format.",
+)
+@click.argument("src_files", required=False, type=click.Path(exists=True), nargs=-1)
+@click.option("--pip-args", help="Arguments to pass directly to pip install.")
+def cli(
+ ask,
+ dry_run,
+ force,
+ find_links,
+ index_url,
+ extra_index_url,
+ trusted_host,
+ no_index,
+ verbose,
+ quiet,
+ user_only,
+ cert,
+ client_cert,
+ src_files,
+ pip_args,
+):
+ """Synchronize virtual environment with requirements.txt."""
+ log.verbosity = verbose - quiet
+
+ if not src_files:
+ if os.path.exists(DEFAULT_REQUIREMENTS_FILE):
+ src_files = (DEFAULT_REQUIREMENTS_FILE,)
+ else:
+ msg = "No requirement files given and no {} found in the current directory"
+ log.error(msg.format(DEFAULT_REQUIREMENTS_FILE))
+ sys.exit(2)
+
+ if any(src_file.endswith(".in") for src_file in src_files):
+ msg = (
+ "Some input files have the .in extension, which is most likely an error "
+ "and can cause weird behaviour. You probably meant to use "
+ "the corresponding *.txt file?"
+ )
+ if force:
+ log.warning("WARNING: " + msg)
+ else:
+ log.error("ERROR: " + msg)
+ sys.exit(2)
+
+ install_command = create_command("install")
+ options, _ = install_command.parse_args([])
+ session = install_command._build_session(options)
+ finder = install_command._build_package_finder(options=options, session=session)
+
+    # Parse the requirements files. Note that all options inside the
+    # requirements files will be collected by the finder.
+ requirements = flat_map(
+ lambda src: parse_requirements(src, finder=finder, session=session), src_files
+ )
+
+ try:
+ requirements = sync.merge(requirements, ignore_conflicts=force)
+ except PipToolsError as e:
+ log.error(str(e))
+ sys.exit(2)
+
+ installed_dists = get_installed_distributions(skip=[], user_only=user_only)
+ to_install, to_uninstall = sync.diff(requirements, installed_dists)
+
+ install_flags = (
+ _compose_install_flags(
+ finder,
+ no_index=no_index,
+ index_url=index_url,
+ extra_index_url=extra_index_url,
+ trusted_host=trusted_host,
+ find_links=find_links,
+ user_only=user_only,
+ cert=cert,
+ client_cert=client_cert,
+ )
+ + shlex.split(pip_args or "")
+ )
+ sys.exit(
+ sync.sync(
+ to_install,
+ to_uninstall,
+ dry_run=dry_run,
+ install_flags=install_flags,
+ ask=ask,
+ )
+ )
+
+
+def _compose_install_flags(
+ finder,
+ no_index=False,
+ index_url=None,
+ extra_index_url=None,
+ trusted_host=None,
+ find_links=None,
+ user_only=False,
+ cert=None,
+ client_cert=None,
+):
+ """
+ Compose install flags with the given finder and CLI options.
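+
+    Illustrative example (hypothetical values): a custom index plus
+    user_only=True composes to something like::
+
+        ["--index-url", "https://example.org/simple", "--user"]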
+ """
+ result = []
+
+ # Build --index-url/--extra-index-url/--no-index
+ if no_index:
+ result.append("--no-index")
+ elif index_url:
+ result.extend(["--index-url", index_url])
+ elif finder.index_urls:
+ finder_index_url = finder.index_urls[0]
+ if finder_index_url != PyPIRepository.DEFAULT_INDEX_URL:
+ result.extend(["--index-url", finder_index_url])
+ for extra_index in finder.index_urls[1:]:
+ result.extend(["--extra-index-url", extra_index])
+ else:
+ result.append("--no-index")
+
+ for extra_index in extra_index_url:
+ result.extend(["--extra-index-url", extra_index])
+
+ # Build --trusted-hosts
+ for host in itertools.chain(trusted_host, finder.trusted_hosts):
+ result.extend(["--trusted-host", host])
+
+ # Build --find-links
+ for link in itertools.chain(find_links, finder.find_links):
+ result.extend(["--find-links", link])
+
+ # Build format controls --no-binary/--only-binary
+ for format_control in ("no_binary", "only_binary"):
+ formats = getattr(finder.format_control, format_control)
+ if not formats:
+ continue
+ result.extend(
+ ["--" + format_control.replace("_", "-"), ",".join(sorted(formats))]
+ )
+
+ if user_only:
+ result.append("--user")
+
+ if cert:
+ result.extend(["--cert", cert])
+
+ if client_cert:
+ result.extend(["--client-cert", client_cert])
+
+ return result
diff --git a/third_party/python/pip_tools/piptools/sync.py b/third_party/python/pip_tools/piptools/sync.py
new file mode 100644
index 0000000000..9967682c7d
--- /dev/null
+++ b/third_party/python/pip_tools/piptools/sync.py
@@ -0,0 +1,216 @@
+import collections
+import os
+import sys
+import tempfile
+from subprocess import check_call # nosec
+
+from pip._internal.commands.freeze import DEV_PKGS
+from pip._internal.utils.compat import stdlib_pkgs
+
+from . import click
+from .exceptions import IncompatibleRequirements
+from .logging import log
+from .utils import (
+ flat_map,
+ format_requirement,
+ get_hashes_from_ireq,
+ is_url_requirement,
+ key_from_ireq,
+ key_from_req,
+)
+
+PACKAGES_TO_IGNORE = (
+ ["-markerlib", "pip", "pip-tools", "pip-review", "pkg-resources"]
+ + list(stdlib_pkgs)
+ + list(DEV_PKGS)
+)
+
+
+def dependency_tree(installed_keys, root_key):
+ """
+ Calculate the dependency tree for the package `root_key` and return
+    a collection of all its dependencies. Uses a BFS traversal (the queue
+    is processed first-in, first-out).
+
+ `installed_keys` should be a {key: requirement} mapping, e.g.
+ {'django': from_line('django==1.8')}
+ `root_key` should be the key to return the dependency tree for.
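+
+    Illustrative example (hypothetical packages), where flask depends on
+    jinja2 and click::
+
+        dependency_tree(installed_keys, 'flask')
+        # -> {'flask', 'jinja2', 'click', ...}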
+ """
+ dependencies = set()
+ queue = collections.deque()
+
+ if root_key in installed_keys:
+ dep = installed_keys[root_key]
+ queue.append(dep)
+
+ while queue:
+ v = queue.popleft()
+ key = key_from_req(v)
+ if key in dependencies:
+ continue
+
+ dependencies.add(key)
+
+ for dep_specifier in v.requires():
+ dep_name = key_from_req(dep_specifier)
+ if dep_name in installed_keys:
+ dep = installed_keys[dep_name]
+
+ if dep_specifier.specifier.contains(dep.version):
+ queue.append(dep)
+
+ return dependencies
+
+
+def get_dists_to_ignore(installed):
+ """
+    Returns a collection of package names to ignore when performing pip-sync,
+    based on the currently installed environment. For example, when pip-tools
+    is installed in the local environment, it should be ignored, including all
+    of its dependencies (e.g. click). When pip-tools is not installed locally,
+    click should be installed/uninstalled like any other package, depending
+    on the given requirements.
+ """
+ installed_keys = {key_from_req(r): r for r in installed}
+ return list(
+ flat_map(lambda req: dependency_tree(installed_keys, req), PACKAGES_TO_IGNORE)
+ )
+
+
+def merge(requirements, ignore_conflicts):
+ by_key = {}
+
+ for ireq in requirements:
+ # Limitation: URL requirements are merged by precise string match, so
+ # "file:///example.zip#egg=example", "file:///example.zip", and
+ # "example==1.0" will not merge with each other
+ if ireq.match_markers():
+ key = key_from_ireq(ireq)
+
+ if not ignore_conflicts:
+ existing_ireq = by_key.get(key)
+ if existing_ireq:
+ # NOTE: We check equality here since we can assume that the
+ # requirements are all pinned
+ if ireq.specifier != existing_ireq.specifier:
+ raise IncompatibleRequirements(ireq, existing_ireq)
+
+ # TODO: Always pick the largest specifier in case of a conflict
+ by_key[key] = ireq
+ return by_key.values()
+
+
+def diff_key_from_ireq(ireq):
+ """
+ Calculate a key for comparing a compiled requirement with installed modules.
+ For URL requirements, only provide a useful key if the url includes
+ #egg=name==version, which will set ireq.req.name and ireq.specifier.
+    Otherwise return ireq.link so the key will not match, forcing a
+    reinstall. Reinstalling is necessary to pick up changes when the URL
+    changes but the version does not.
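+
+    Illustrative examples (hypothetical URLs)::
+
+        https://example.com/example.zip#egg=example==1.0  ->  "example"
+        https://example.com/example.zip                   ->  the link string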
+ """
+ if is_url_requirement(ireq):
+ if (
+ ireq.req
+ and (getattr(ireq.req, "key", None) or getattr(ireq.req, "name", None))
+ and ireq.specifier
+ ):
+ return key_from_ireq(ireq)
+ return str(ireq.link)
+ return key_from_ireq(ireq)
+
+
+def diff(compiled_requirements, installed_dists):
+ """
+ Calculate which packages should be installed or uninstalled, given a set
+ of compiled requirements and a list of currently installed modules.
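+
+    Returns a (to_install, to_uninstall) pair: to_install holds
+    InstallRequirement objects, to_uninstall holds package keys.
+    Illustrative example (hypothetical packages)::
+
+        ({<ireq for "click==7.1.2">}, {"itsdangerous"})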
+ """
+ requirements_lut = {diff_key_from_ireq(r): r for r in compiled_requirements}
+
+ satisfied = set() # holds keys
+ to_install = set() # holds InstallRequirement objects
+ to_uninstall = set() # holds keys
+
+ pkgs_to_ignore = get_dists_to_ignore(installed_dists)
+ for dist in installed_dists:
+ key = key_from_req(dist)
+ if key not in requirements_lut or not requirements_lut[key].match_markers():
+ to_uninstall.add(key)
+ elif requirements_lut[key].specifier.contains(dist.version):
+ satisfied.add(key)
+
+ for key, requirement in requirements_lut.items():
+ if key not in satisfied and requirement.match_markers():
+ to_install.add(requirement)
+
+ # Make sure to not uninstall any packages that should be ignored
+ to_uninstall -= set(pkgs_to_ignore)
+
+ return (to_install, to_uninstall)
+
+
+def sync(to_install, to_uninstall, dry_run=False, install_flags=None, ask=False):
+ """
+    Install and uninstall the given sets of modules.
+ """
+ exit_code = 0
+
+ if not to_uninstall and not to_install:
+ log.info("Everything up-to-date", err=False)
+ return exit_code
+
+ pip_flags = []
+ if log.verbosity < 0:
+ pip_flags += ["-q"]
+
+ if ask:
+ dry_run = True
+
+ if dry_run:
+ if to_uninstall:
+ click.echo("Would uninstall:")
+ for pkg in sorted(to_uninstall):
+ click.echo(" {}".format(pkg))
+
+ if to_install:
+ click.echo("Would install:")
+ for ireq in sorted(to_install, key=key_from_ireq):
+ click.echo(" {}".format(format_requirement(ireq)))
+
+ exit_code = 1
+
+ if ask and click.confirm("Would you like to proceed with these changes?"):
+ dry_run = False
+ exit_code = 0
+
+ if not dry_run:
+ if to_uninstall:
+ check_call( # nosec
+ [sys.executable, "-m", "pip", "uninstall", "-y"]
+ + pip_flags
+ + sorted(to_uninstall)
+ )
+
+ if to_install:
+ if install_flags is None:
+ install_flags = []
+ # prepare requirement lines
+ req_lines = []
+ for ireq in sorted(to_install, key=key_from_ireq):
+ ireq_hashes = get_hashes_from_ireq(ireq)
+ req_lines.append(format_requirement(ireq, hashes=ireq_hashes))
+
+ # save requirement lines to a temporary file
+ tmp_req_file = tempfile.NamedTemporaryFile(mode="wt", delete=False)
+ tmp_req_file.write("\n".join(req_lines))
+ tmp_req_file.close()
+
+ try:
+ check_call( # nosec
+ [sys.executable, "-m", "pip", "install", "-r", tmp_req_file.name]
+ + pip_flags
+ + install_flags
+ )
+ finally:
+ os.unlink(tmp_req_file.name)
+
+ return exit_code
diff --git a/third_party/python/pip_tools/piptools/utils.py b/third_party/python/pip_tools/piptools/utils.py
new file mode 100644
index 0000000000..4b20ba6e38
--- /dev/null
+++ b/third_party/python/pip_tools/piptools/utils.py
@@ -0,0 +1,384 @@
+# coding: utf-8
+from __future__ import absolute_import, division, print_function, unicode_literals
+
+import sys
+from collections import OrderedDict
+from itertools import chain
+
+from click.utils import LazyFile
+from pip._internal.req.constructors import install_req_from_line
+from pip._internal.utils.misc import redact_auth_from_url
+from pip._internal.vcs import is_url
+from pip._vendor import six
+from pip._vendor.six.moves import shlex_quote
+
+from .click import style
+
+UNSAFE_PACKAGES = {"setuptools", "distribute", "pip"}
+COMPILE_EXCLUDE_OPTIONS = {
+ "--dry-run",
+ "--quiet",
+ "--rebuild",
+ "--upgrade",
+ "--upgrade-package",
+ "--verbose",
+ "--cache-dir",
+ "--no-reuse-hashes",
+}
+
+
+def key_from_ireq(ireq):
+ """Get a standardized key for an InstallRequirement."""
+ if ireq.req is None and ireq.link is not None:
+ return str(ireq.link)
+ else:
+ return key_from_req(ireq.req)
+
+
+def key_from_req(req):
+ """Get an all-lowercase version of the requirement's name."""
+ if hasattr(req, "key"):
+ # from pkg_resources, such as installed dists for pip-sync
+ key = req.key
+ else:
+ # from packaging, such as install requirements from requirements.txt
+ key = req.name
+
+ key = key.replace("_", "-").lower()
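+    # e.g. "Django" -> "django", "zope_interface" -> "zope-interface"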
+ return key
+
+
+def comment(text):
+ return style(text, fg="green")
+
+
+def make_install_requirement(name, version, extras, constraint=False):
+ # If no extras are specified, the extras string is blank
+ extras_string = ""
+ if extras:
+ # Sort extras for stability
+ extras_string = "[{}]".format(",".join(sorted(extras)))
+
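+    # e.g. ("flask", "1.1.2", {"dotenv"}) -> the ireq for "flask[dotenv]==1.1.2"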
+ return install_req_from_line(
+ str("{}{}=={}".format(name, extras_string, version)), constraint=constraint
+ )
+
+
+def is_url_requirement(ireq):
+ """
+    Return True if the requirement was specified as a path or URL.
+    ireq.original_link will have been set by InstallRequirement.__init__.
+ """
+ return bool(ireq.original_link)
+
+
+def format_requirement(ireq, marker=None, hashes=None):
+ """
+ Generic formatter for pretty printing InstallRequirements to the terminal
+    in a less verbose way than using their `__str__` method.
+ """
+ if ireq.editable:
+ line = "-e {}".format(ireq.link.url)
+ elif is_url_requirement(ireq):
+ line = ireq.link.url
+ else:
+ line = str(ireq.req).lower()
+
+ if marker:
+ line = "{} ; {}".format(line, marker)
+
+ if hashes:
+ for hash_ in sorted(hashes):
+ line += " \\\n --hash={}".format(hash_)
+
+ return line
+
+
+def format_specifier(ireq):
+ """
+ Generic formatter for pretty printing the specifier part of
+ InstallRequirements to the terminal.
+ """
+ # TODO: Ideally, this is carried over to the pip library itself
+ specs = ireq.specifier if ireq.req is not None else []
+ specs = sorted(specs, key=lambda x: x.version)
+ return ",".join(str(s) for s in specs) or "<any>"
+
+
+def is_pinned_requirement(ireq):
+ """
+ Returns whether an InstallRequirement is a "pinned" requirement.
+
+ An InstallRequirement is considered pinned if:
+
+    - It is not editable
+    - It has exactly one specifier
+    - That specifier is "==" (or "===")
+ - The version does not contain a wildcard
+
+ Examples:
+ django==1.8 # pinned
+ django>1.8 # NOT pinned
+ django~=1.8 # NOT pinned
+ django==1.* # NOT pinned
+ """
+ if ireq.editable:
+ return False
+
+ if ireq.req is None or len(ireq.specifier) != 1:
+ return False
+
+ spec = next(iter(ireq.specifier))
+ return spec.operator in {"==", "==="} and not spec.version.endswith(".*")
+
+
+def as_tuple(ireq):
+ """
+    Pulls out the (name: str, version: str, extras: Tuple[str, ...]) tuple from
+ the pinned InstallRequirement.
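+
+    For example (illustrative), the pin django[bcrypt]==1.8 becomes
+    ('django', '1.8', ('bcrypt',)).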
+ """
+ if not is_pinned_requirement(ireq):
+ raise TypeError("Expected a pinned InstallRequirement, got {}".format(ireq))
+
+ name = key_from_ireq(ireq)
+ version = next(iter(ireq.specifier)).version
+ extras = tuple(sorted(ireq.extras))
+ return name, version, extras
+
+
+def flat_map(fn, collection):
+ """Map a function over a collection and flatten the result by one-level"""
+ return chain.from_iterable(map(fn, collection))
+
+
+def lookup_table(values, key=None, keyval=None, unique=False, use_lists=False):
+ """
+    Builds a dict-based lookup table (index) from an iterable of values.
+
+ Supports building normal and unique lookup tables. For example:
+
+ >>> assert lookup_table(
+ ... ['foo', 'bar', 'baz', 'qux', 'quux'], lambda s: s[0]) == {
+ ... 'b': {'bar', 'baz'},
+ ... 'f': {'foo'},
+ ... 'q': {'quux', 'qux'}
+ ... }
+
+ For key functions that uniquely identify values, set unique=True:
+
+ >>> assert lookup_table(
+ ... ['foo', 'bar', 'baz', 'qux', 'quux'], lambda s: s[0],
+ ... unique=True) == {
+ ... 'b': 'baz',
+ ... 'f': 'foo',
+ ... 'q': 'quux'
+ ... }
+
+    To have the values represented as lists, set use_lists=True:
+
+ >>> assert lookup_table(
+ ... ['foo', 'bar', 'baz', 'qux', 'quux'], lambda s: s[0],
+ ... use_lists=True) == {
+ ... 'b': ['bar', 'baz'],
+ ... 'f': ['foo'],
+ ... 'q': ['qux', 'quux']
+ ... }
+
+ The values of the resulting lookup table will be lists, not sets.
+
+ For extra power, you can even change the values while building up the LUT.
+ To do so, use the `keyval` function instead of the `key` arg:
+
+ >>> assert lookup_table(
+ ... ['foo', 'bar', 'baz', 'qux', 'quux'],
+ ... keyval=lambda s: (s[0], s[1:])) == {
+ ... 'b': {'ar', 'az'},
+ ... 'f': {'oo'},
+ ... 'q': {'uux', 'ux'}
+ ... }
+
+
+    """
+ if keyval is None:
+ if key is None:
+
+ def keyval(v):
+ return v
+
+ else:
+
+ def keyval(v):
+ return (key(v), v)
+
+ if unique:
+ return dict(keyval(v) for v in values)
+
+ lut = {}
+ for value in values:
+ k, v = keyval(value)
+ try:
+ s = lut[k]
+ except KeyError:
+ if use_lists:
+ s = lut[k] = list()
+ else:
+ s = lut[k] = set()
+ if use_lists:
+ s.append(v)
+ else:
+ s.add(v)
+ return dict(lut)
+
+
+def dedup(iterable):
+ """Deduplicate an iterable object like iter(set(iterable)) but
+ order-preserved.
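+
+    >>> list(dedup(["pip", "click", "pip"]))
+    ['pip', 'click']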
+ """
+ return iter(OrderedDict.fromkeys(iterable))
+
+
+def name_from_req(req):
+ """Get the name of the requirement"""
+ if hasattr(req, "project_name"):
+ # from pkg_resources, such as installed dists for pip-sync
+ return req.project_name
+ else:
+ # from packaging, such as install requirements from requirements.txt
+ return req.name
+
+
+def fs_str(string):
+ """
+ Convert given string to a correctly encoded filesystem string.
+
+ On Python 2, if the input string is unicode, converts it to bytes
+ encoded with the filesystem encoding.
+
+    On Python 3, returns the string as-is, since Python 3 uses unicode
+    paths and the input string should never be bytes.
+
+ :type string: str|unicode
+ :rtype: str
+ """
+ if isinstance(string, str):
+ return string
+ if isinstance(string, bytes):
+ raise TypeError("fs_str() argument must not be bytes")
+ return string.encode(_fs_encoding)
+
+
+_fs_encoding = sys.getfilesystemencoding() or sys.getdefaultencoding()
+
+
+def get_hashes_from_ireq(ireq):
+ """
+ Given an InstallRequirement, return a list of string hashes in
+ the format "{algorithm}:{hash}". Return an empty list if there are no hashes
+ in the requirement options.
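+
+    Illustrative example (shortened hypothetical digests)::
+
+        ["sha256:04c7c97...", "sha256:9f1cc8f..."]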
+ """
+ result = []
+ for algorithm, hexdigests in ireq.hash_options.items():
+ for hash_ in hexdigests:
+ result.append("{}:{}".format(algorithm, hash_))
+ return result
+
+
+def force_text(s):
+ """
+ Return a string representing `s`.
+ """
+ if s is None:
+ return ""
+ if not isinstance(s, six.string_types):
+ return six.text_type(s)
+ return s
+
+
+def get_compile_command(click_ctx):
+ """
+ Returns a normalized compile command depending on cli context.
+
+ The command will be normalized by:
+    - expanding short options to their long forms
+ - removing values that are already default
+ - sorting the arguments
+ - removing one-off arguments like '--upgrade'
+ - removing arguments that don't change build behaviour like '--verbose'
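+
+    For example (illustrative), "pip-compile -v --no-annotate requirements.in"
+    normalizes to "pip-compile --no-annotate requirements.in".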
+ """
+ from piptools.scripts.compile import cli
+
+ # Map of the compile cli options (option name -> click.Option)
+ compile_options = {option.name: option for option in cli.params}
+
+ left_args = []
+ right_args = []
+
+ for option_name, value in click_ctx.params.items():
+ option = compile_options[option_name]
+
+ # Collect variadic args separately, they will be added
+ # at the end of the command later
+ if option.nargs < 0:
+ # These will necessarily be src_files
+ # Re-add click-stripped '--' if any start with '-'
+ if any(val.startswith("-") and val != "-" for val in value):
+ right_args.append("--")
+ right_args.extend([shlex_quote(force_text(val)) for val in value])
+ continue
+
+        # Get the last option name (usually the long form)
+ option_long_name = option.opts[-1]
+
+ # Exclude one-off options (--upgrade/--upgrade-package/--rebuild/...)
+ # or options that don't change compile behaviour (--verbose/--dry-run/...)
+ if option_long_name in COMPILE_EXCLUDE_OPTIONS:
+ continue
+
+ # Skip options without a value
+ if option.default is None and not value:
+ continue
+
+ # Skip options with a default value
+ if option.default == value:
+ continue
+
+ # Use a file name for file-like objects
+ if isinstance(value, LazyFile):
+ value = value.name
+
+        # Convert the value to a list
+ if not isinstance(value, (tuple, list)):
+ value = [value]
+
+ for val in value:
+            # Flags don't take a value, so add the true or false option's long name
+ if option.is_flag:
+                # If there are false-options, choose the option name based on the value
+ if option.secondary_opts:
+ # Get the latest false-option
+ secondary_option_long_name = option.secondary_opts[-1]
+ arg = option_long_name if val else secondary_option_long_name
+ # There are no false-options, use true-option
+ else:
+ arg = option_long_name
+ left_args.append(shlex_quote(arg))
+ # Append to args the option with a value
+ else:
+ if isinstance(val, six.string_types) and is_url(val):
+ val = redact_auth_from_url(val)
+ if option.name == "pip_args":
+ # shlex_quote would produce functional but noisily quoted results,
+ # e.g. --pip-args='--cache-dir='"'"'/tmp/with spaces'"'"''
+ # Instead, we try to get more legible quoting via repr:
+ left_args.append(
+ "{option}={value}".format(
+ option=option_long_name, value=repr(fs_str(force_text(val)))
+ )
+ )
+ else:
+ left_args.append(
+ "{option}={value}".format(
+ option=option_long_name, value=shlex_quote(force_text(val))
+ )
+ )
+
+ return " ".join(["pip-compile"] + sorted(left_args) + sorted(right_args))
diff --git a/third_party/python/pip_tools/piptools/writer.py b/third_party/python/pip_tools/piptools/writer.py
new file mode 100644
index 0000000000..515df198eb
--- /dev/null
+++ b/third_party/python/pip_tools/piptools/writer.py
@@ -0,0 +1,243 @@
+from __future__ import unicode_literals
+
+import os
+import re
+from itertools import chain
+
+from pip._vendor import six
+
+from .click import unstyle
+from .logging import log
+from .utils import (
+ UNSAFE_PACKAGES,
+ comment,
+ dedup,
+ format_requirement,
+ get_compile_command,
+ key_from_ireq,
+)
+
+MESSAGE_UNHASHED_PACKAGE = comment(
+ "# WARNING: pip install will require the following package to be hashed."
+ "\n# Consider using a hashable URL like "
+ "https://github.com/jazzband/pip-tools/archive/SOMECOMMIT.zip"
+)
+
+MESSAGE_UNSAFE_PACKAGES_UNPINNED = comment(
+ "# WARNING: The following packages were not pinned, but pip requires them to be"
+ "\n# pinned when the requirements file includes hashes. "
+ "Consider using the --allow-unsafe flag."
+)
+
+MESSAGE_UNSAFE_PACKAGES = comment(
+ "# The following packages are considered to be unsafe in a requirements file:"
+)
+
+MESSAGE_UNINSTALLABLE = (
+ "The generated requirements file may be rejected by pip install. "
+ "See # WARNING lines for details."
+)
+
+
+strip_comes_from_line_re = re.compile(r" \(line \d+\)$")
+
+
+def _comes_from_as_string(ireq):
+ if isinstance(ireq.comes_from, six.string_types):
+ return strip_comes_from_line_re.sub("", ireq.comes_from)
+ return key_from_ireq(ireq.comes_from)
+
+
+class OutputWriter(object):
+ def __init__(
+ self,
+ src_files,
+ dst_file,
+ click_ctx,
+ dry_run,
+ emit_header,
+ emit_index_url,
+ emit_trusted_host,
+ annotate,
+ generate_hashes,
+ default_index_url,
+ index_urls,
+ trusted_hosts,
+ format_control,
+ allow_unsafe,
+ find_links,
+ emit_find_links,
+ ):
+ self.src_files = src_files
+ self.dst_file = dst_file
+ self.click_ctx = click_ctx
+ self.dry_run = dry_run
+ self.emit_header = emit_header
+ self.emit_index_url = emit_index_url
+ self.emit_trusted_host = emit_trusted_host
+ self.annotate = annotate
+ self.generate_hashes = generate_hashes
+ self.default_index_url = default_index_url
+ self.index_urls = index_urls
+ self.trusted_hosts = trusted_hosts
+ self.format_control = format_control
+ self.allow_unsafe = allow_unsafe
+ self.find_links = find_links
+ self.emit_find_links = emit_find_links
+
+ def _sort_key(self, ireq):
+ return (not ireq.editable, str(ireq.req).lower())
+
+ def write_header(self):
+ if self.emit_header:
+ yield comment("#")
+ yield comment("# This file is autogenerated by pip-compile")
+ yield comment("# To update, run:")
+ yield comment("#")
+ compile_command = os.environ.get(
+ "CUSTOM_COMPILE_COMMAND"
+ ) or get_compile_command(self.click_ctx)
+ yield comment("# {}".format(compile_command))
+ yield comment("#")
+
+ def write_index_options(self):
+ if self.emit_index_url:
+ for index, index_url in enumerate(dedup(self.index_urls)):
+ if index_url.rstrip("/") == self.default_index_url:
+ continue
+ flag = "--index-url" if index == 0 else "--extra-index-url"
+ yield "{} {}".format(flag, index_url)
+
+ def write_trusted_hosts(self):
+ if self.emit_trusted_host:
+ for trusted_host in dedup(self.trusted_hosts):
+ yield "--trusted-host {}".format(trusted_host)
+
+ def write_format_controls(self):
+ for nb in dedup(sorted(self.format_control.no_binary)):
+ yield "--no-binary {}".format(nb)
+ for ob in dedup(sorted(self.format_control.only_binary)):
+ yield "--only-binary {}".format(ob)
+
+ def write_find_links(self):
+ if self.emit_find_links:
+ for find_link in dedup(self.find_links):
+ yield "--find-links {}".format(find_link)
+
+ def write_flags(self):
+ emitted = False
+ for line in chain(
+ self.write_index_options(),
+ self.write_find_links(),
+ self.write_trusted_hosts(),
+ self.write_format_controls(),
+ ):
+ emitted = True
+ yield line
+ if emitted:
+ yield ""
+
+ def _iter_lines(self, results, unsafe_requirements=None, markers=None, hashes=None):
+ # default values
+ unsafe_requirements = unsafe_requirements or []
+ markers = markers or {}
+ hashes = hashes or {}
+
+ # Check for unhashed or unpinned packages if at least one package does have
+ # hashes, which will trigger pip install's --require-hashes mode.
+ warn_uninstallable = False
+ has_hashes = hashes and any(hash for hash in hashes.values())
+
+ yielded = False
+
+ for line in self.write_header():
+ yield line
+ yielded = True
+ for line in self.write_flags():
+ yield line
+ yielded = True
+
+ unsafe_requirements = (
+ {r for r in results if r.name in UNSAFE_PACKAGES}
+ if not unsafe_requirements
+ else unsafe_requirements
+ )
+ packages = {r for r in results if r.name not in UNSAFE_PACKAGES}
+
+ if packages:
+ packages = sorted(packages, key=self._sort_key)
+ for ireq in packages:
+ if has_hashes and not hashes.get(ireq):
+ yield MESSAGE_UNHASHED_PACKAGE
+ warn_uninstallable = True
+ line = self._format_requirement(
+ ireq, markers.get(key_from_ireq(ireq)), hashes=hashes
+ )
+ yield line
+ yielded = True
+
+ if unsafe_requirements:
+ unsafe_requirements = sorted(unsafe_requirements, key=self._sort_key)
+ yield ""
+ yielded = True
+ if has_hashes and not self.allow_unsafe:
+ yield MESSAGE_UNSAFE_PACKAGES_UNPINNED
+ warn_uninstallable = True
+ else:
+ yield MESSAGE_UNSAFE_PACKAGES
+
+ for ireq in unsafe_requirements:
+ ireq_key = key_from_ireq(ireq)
+ if not self.allow_unsafe:
+ yield comment("# {}".format(ireq_key))
+ else:
+ line = self._format_requirement(
+ ireq, marker=markers.get(ireq_key), hashes=hashes
+ )
+ yield line
+
+ # Yield even when there's no real content, so that blank files are written
+ if not yielded:
+ yield ""
+
+ if warn_uninstallable:
+ log.warning(MESSAGE_UNINSTALLABLE)
+
+ def write(self, results, unsafe_requirements, markers, hashes):
+
+ for line in self._iter_lines(results, unsafe_requirements, markers, hashes):
+ log.info(line)
+ if not self.dry_run:
+ self.dst_file.write(unstyle(line).encode("utf-8"))
+ self.dst_file.write(os.linesep.encode("utf-8"))
+
+ def _format_requirement(self, ireq, marker=None, hashes=None):
+ ireq_hashes = (hashes if hashes is not None else {}).get(ireq)
+
+ line = format_requirement(ireq, marker=marker, hashes=ireq_hashes)
+
+ if not self.annotate:
+ return line
+
+        # Annotate which packages or requirements files this package is required by
+ required_by = set()
+ if hasattr(ireq, "_source_ireqs"):
+ required_by |= {
+ _comes_from_as_string(src_ireq)
+ for src_ireq in ireq._source_ireqs
+ if src_ireq.comes_from
+ }
+ elif ireq.comes_from:
+ required_by.add(_comes_from_as_string(ireq))
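+        # A single source renders inline as "# via <source>"; multiple
+        # sources render as a "# via" header with one source per line
+        # (see the branches below).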
+ if required_by:
+ required_by = sorted(required_by)
+ if len(required_by) == 1:
+ source = required_by[0]
+ annotation = " # via " + source
+ else:
+ annotation_lines = [" # via"]
+ for source in required_by:
+ annotation_lines.append(" # " + source)
+ annotation = "\n".join(annotation_lines)
+ line = "{}\n{}".format(line, comment(annotation))
+ return line