summaryrefslogtreecommitdiffstats
path: root/third_party/python/urllib3
diff options
context:
space:
mode:
Diffstat (limited to 'third_party/python/urllib3')
-rw-r--r--third_party/python/urllib3/urllib3-1.26.0.dist-info/RECORD44
-rw-r--r--third_party/python/urllib3/urllib3-1.26.17.dist-info/LICENSE.txt (renamed from third_party/python/urllib3/urllib3-1.26.0.dist-info/LICENSE.txt)0
-rw-r--r--third_party/python/urllib3/urllib3-1.26.17.dist-info/METADATA (renamed from third_party/python/urllib3/urllib3-1.26.0.dist-info/METADATA)177
-rw-r--r--third_party/python/urllib3/urllib3-1.26.17.dist-info/RECORD44
-rw-r--r--third_party/python/urllib3/urllib3-1.26.17.dist-info/WHEEL (renamed from third_party/python/urllib3/urllib3-1.26.0.dist-info/WHEEL)2
-rw-r--r--third_party/python/urllib3/urllib3-1.26.17.dist-info/top_level.txt (renamed from third_party/python/urllib3/urllib3-1.26.0.dist-info/top_level.txt)0
-rw-r--r--third_party/python/urllib3/urllib3/__init__.py17
-rw-r--r--third_party/python/urllib3/urllib3/_version.py2
-rw-r--r--third_party/python/urllib3/urllib3/connection.py62
-rw-r--r--third_party/python/urllib3/urllib3/connectionpool.py97
-rw-r--r--third_party/python/urllib3/urllib3/contrib/_securetransport/bindings.py2
-rw-r--r--third_party/python/urllib3/urllib3/contrib/_securetransport/low_level.py1
-rw-r--r--third_party/python/urllib3/urllib3/contrib/appengine.py4
-rw-r--r--third_party/python/urllib3/urllib3/contrib/ntlmpool.py13
-rw-r--r--third_party/python/urllib3/urllib3/contrib/pyopenssl.py19
-rw-r--r--third_party/python/urllib3/urllib3/contrib/securetransport.py5
-rw-r--r--third_party/python/urllib3/urllib3/contrib/socks.py2
-rw-r--r--third_party/python/urllib3/urllib3/exceptions.py12
-rw-r--r--third_party/python/urllib3/urllib3/packages/__init__.py5
-rw-r--r--third_party/python/urllib3/urllib3/packages/backports/weakref_finalize.py155
-rw-r--r--third_party/python/urllib3/urllib3/packages/six.py125
-rw-r--r--third_party/python/urllib3/urllib3/packages/ssl_match_hostname/__init__.py22
-rw-r--r--third_party/python/urllib3/urllib3/poolmanager.py3
-rw-r--r--third_party/python/urllib3/urllib3/request.py21
-rw-r--r--third_party/python/urllib3/urllib3/response.py72
-rw-r--r--third_party/python/urllib3/urllib3/util/connection.py5
-rw-r--r--third_party/python/urllib3/urllib3/util/proxy.py1
-rw-r--r--third_party/python/urllib3/urllib3/util/request.py5
-rw-r--r--third_party/python/urllib3/urllib3/util/retry.py37
-rw-r--r--third_party/python/urllib3/urllib3/util/ssl_.py53
-rw-r--r--third_party/python/urllib3/urllib3/util/ssl_match_hostname.py (renamed from third_party/python/urllib3/urllib3/packages/ssl_match_hostname/_implementation.py)15
-rw-r--r--third_party/python/urllib3/urllib3/util/ssltransport.py6
-rw-r--r--third_party/python/urllib3/urllib3/util/timeout.py9
-rw-r--r--third_party/python/urllib3/urllib3/util/url.py17
-rw-r--r--third_party/python/urllib3/urllib3/util/wait.py1
35 files changed, 841 insertions, 214 deletions
diff --git a/third_party/python/urllib3/urllib3-1.26.0.dist-info/RECORD b/third_party/python/urllib3/urllib3-1.26.0.dist-info/RECORD
deleted file mode 100644
index ec9088a111..0000000000
--- a/third_party/python/urllib3/urllib3-1.26.0.dist-info/RECORD
+++ /dev/null
@@ -1,44 +0,0 @@
-urllib3/__init__.py,sha256=j3yzHIbmW7CS-IKQJ9-PPQf_YKO8EOAey_rMW0UR7us,2763
-urllib3/_collections.py,sha256=Rp1mVyBgc_UlAcp6M3at1skJBXR5J43NawRTvW2g_XY,10811
-urllib3/_version.py,sha256=H0vLQ8PY350EPZlZQa8ri0tEjVS-xhGdQOHcU360-0A,63
-urllib3/connection.py,sha256=BdaUSNpGzO0zq28i9MhOXb6QZspeVdVrYtjnkk2Eqg4,18396
-urllib3/connectionpool.py,sha256=IKoeuJZY9YAYm0GK4q-MXAhyXW0M_FnvabYaNsDIR-E,37133
-urllib3/exceptions.py,sha256=lNrKC5J8zeBXIu9SSKSNb7cLi8iXl9ARu9DHD2SflZM,7810
-urllib3/fields.py,sha256=kvLDCg_JmH1lLjUUEY_FLS8UhY7hBvDPuVETbY8mdrM,8579
-urllib3/filepost.py,sha256=5b_qqgRHVlL7uLtdAYBzBh-GHmU5AfJVt_2N0XS3PeY,2440
-urllib3/poolmanager.py,sha256=whzlX6UTEgODMOCy0ZDMUONRBCz5wyIM8Z9opXAY-Lk,19763
-urllib3/request.py,sha256=ZFSIqX0C6WizixecChZ3_okyu7BEv0lZu1VT0s6h4SM,5985
-urllib3/response.py,sha256=hGhGBh7TkEkh_IQg5C1W_xuPNrgIKv5BUXPyE-q0LuE,28203
-urllib3/contrib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-urllib3/contrib/_appengine_environ.py,sha256=bDbyOEhW2CKLJcQqAKAyrEHN-aklsyHFKq6vF8ZFsmk,957
-urllib3/contrib/appengine.py,sha256=7Pxb0tKfDB_LTGPERiswH0qomhDoUUOo5kwybAKLQyE,11010
-urllib3/contrib/ntlmpool.py,sha256=6I95h1_71fzxmoMSNtY0gB8lnyCoVtP_DpqFGj14fdU,4160
-urllib3/contrib/pyopenssl.py,sha256=vgh6j52w9xgwq-3R2kfB5M2JblQATJfKAK3lIAc1kSg,16778
-urllib3/contrib/securetransport.py,sha256=KxGPZk8d4YepWm7Rc-SBt1XrzIfnLKc8JkUVV75XzgE,34286
-urllib3/contrib/socks.py,sha256=DcRjM2l0rQMIyhYrN6r-tnVkY6ZTDxHJlM8_usAkGCA,7097
-urllib3/contrib/_securetransport/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-urllib3/contrib/_securetransport/bindings.py,sha256=E1_7ScsgOchfxneozbAueK7ziCwF35fna4DuDCYJ9_o,17637
-urllib3/contrib/_securetransport/low_level.py,sha256=lgIdsSycqfB0Xm5BiJzXGeIKT7ybCQMFPJAgkcwPa1s,13908
-urllib3/packages/__init__.py,sha256=h4BLhD4tLaBx1adaDtKXfupsgqY0wWLXb_f1_yVlV6A,108
-urllib3/packages/six.py,sha256=adx4z-eM_D0Vvu0IIqVzFACQ_ux9l64y7DkSEfbxCDs,32536
-urllib3/packages/backports/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-urllib3/packages/backports/makefile.py,sha256=nbzt3i0agPVP07jqqgjhaYjMmuAi_W5E0EywZivVO8E,1417
-urllib3/packages/ssl_match_hostname/__init__.py,sha256=zppezdEQdpGsYerI6mV6MfUYy495JV4mcOWC_GgbljU,757
-urllib3/packages/ssl_match_hostname/_implementation.py,sha256=6dZ-q074g7XhsJ27MFCgkct8iVNZB3sMZvKhf-KUVy0,5679
-urllib3/util/__init__.py,sha256=JEmSmmqqLyaw8P51gUImZh8Gwg9i1zSe-DoqAitn2nc,1155
-urllib3/util/connection.py,sha256=21B-LX0c8fkxPDssyHCaK0pCnmrKmhltg5EoouHiAPU,4910
-urllib3/util/proxy.py,sha256=FGipAEnvZteyldXNjce4DEB7YzwU-a5lep8y5S0qHQg,1604
-urllib3/util/queue.py,sha256=nRgX8_eX-_VkvxoX096QWoz8Ps0QHUAExILCY_7PncM,498
-urllib3/util/request.py,sha256=NnzaEKQ1Pauw5MFMV6HmgEMHITf0Aua9fQuzi2uZzGc,4123
-urllib3/util/response.py,sha256=GJpg3Egi9qaJXRwBh5wv-MNuRWan5BIu40oReoxWP28,3510
-urllib3/util/retry.py,sha256=tn168HDMUynFmXRP-uVaLRUOlbTEJikoB1RuZdwfCes,21366
-urllib3/util/ssl_.py,sha256=cUsmU604z2zAOZcaXDpINXOokQ1RtlJMe96TBDkaJp0,16199
-urllib3/util/ssltransport.py,sha256=IvGQvs9YWkf4jzfqVjTu_UWjwAUgPn5ActajW8VLz6A,6908
-urllib3/util/timeout.py,sha256=QSbBUNOB9yh6AnDn61SrLQ0hg5oz0I9-uXEG91AJuIg,10003
-urllib3/util/url.py,sha256=LWfLSlI4l2FmUMKfCkElCaW10-0N-sJDT9bxaDZJkjs,13964
-urllib3/util/wait.py,sha256=3MUKRSAUJDB2tgco7qRUskW0zXGAWYvRRE4Q1_6xlLs,5404
-urllib3-1.26.0.dist-info/LICENSE.txt,sha256=w3vxhuJ8-dvpYZ5V7f486nswCRzrPaY8fay-Dm13kHs,1115
-urllib3-1.26.0.dist-info/METADATA,sha256=Wghdt6nLf9HfZHhWj8Dpgz4n9vGRqXYhdIwJRPgki6M,42629
-urllib3-1.26.0.dist-info/WHEEL,sha256=ADKeyaGyKF5DwBNE0sRE5pvW-bSkFMJfBuhzZ3rceP4,110
-urllib3-1.26.0.dist-info/top_level.txt,sha256=EMiXL2sKrTcmrMxIHTqdc3ET54pQI2Y072LexFEemvo,8
-urllib3-1.26.0.dist-info/RECORD,,
diff --git a/third_party/python/urllib3/urllib3-1.26.0.dist-info/LICENSE.txt b/third_party/python/urllib3/urllib3-1.26.17.dist-info/LICENSE.txt
index 429a1767e4..429a1767e4 100644
--- a/third_party/python/urllib3/urllib3-1.26.0.dist-info/LICENSE.txt
+++ b/third_party/python/urllib3/urllib3-1.26.17.dist-info/LICENSE.txt
diff --git a/third_party/python/urllib3/urllib3-1.26.0.dist-info/METADATA b/third_party/python/urllib3/urllib3-1.26.17.dist-info/METADATA
index 39869aafad..9493faee66 100644
--- a/third_party/python/urllib3/urllib3-1.26.0.dist-info/METADATA
+++ b/third_party/python/urllib3/urllib3-1.26.17.dist-info/METADATA
@@ -1,6 +1,6 @@
Metadata-Version: 2.1
Name: urllib3
-Version: 1.26.0
+Version: 1.26.17
Summary: HTTP library with thread-safe connection pooling, file post, and more.
Home-page: https://urllib3.readthedocs.io/
Author: Andrey Petrov
@@ -10,7 +10,6 @@ Project-URL: Documentation, https://urllib3.readthedocs.io/
Project-URL: Code, https://github.com/urllib3/urllib3
Project-URL: Issue tracker, https://github.com/urllib3/urllib3/issues
Keywords: urllib httplib threadsafe filepost http https ssl pooling
-Platform: UNKNOWN
Classifier: Environment :: Web Environment
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: MIT License
@@ -19,27 +18,33 @@ Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 2
Classifier: Programming Language :: Python :: 2.7
Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.5
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Programming Language :: Python :: Implementation :: PyPy
Classifier: Topic :: Internet :: WWW/HTTP
Classifier: Topic :: Software Development :: Libraries
-Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4
+Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*
Description-Content-Type: text/x-rst
+License-File: LICENSE.txt
Provides-Extra: brotli
-Requires-Dist: brotlipy (>=0.6.0) ; extra == 'brotli'
+Requires-Dist: brotlicffi >=0.8.0 ; ((os_name != "nt" or python_version >= "3") and platform_python_implementation != "CPython") and extra == 'brotli'
+Requires-Dist: brotli ==1.0.9 ; (os_name != "nt" and python_version < "3" and platform_python_implementation == "CPython") and extra == 'brotli'
+Requires-Dist: brotlipy >=0.6.0 ; (os_name == "nt" and python_version < "3") and extra == 'brotli'
+Requires-Dist: brotli >=1.0.9 ; (python_version >= "3" and platform_python_implementation == "CPython") and extra == 'brotli'
Provides-Extra: secure
-Requires-Dist: pyOpenSSL (>=0.14) ; extra == 'secure'
-Requires-Dist: cryptography (>=1.3.4) ; extra == 'secure'
-Requires-Dist: idna (>=2.0.0) ; extra == 'secure'
+Requires-Dist: pyOpenSSL >=0.14 ; extra == 'secure'
+Requires-Dist: cryptography >=1.3.4 ; extra == 'secure'
+Requires-Dist: idna >=2.0.0 ; extra == 'secure'
Requires-Dist: certifi ; extra == 'secure'
+Requires-Dist: urllib3-secure-extra ; extra == 'secure'
Requires-Dist: ipaddress ; (python_version == "2.7") and extra == 'secure'
Provides-Extra: socks
-Requires-Dist: PySocks (!=1.5.7,<2.0,>=1.5.6) ; extra == 'socks'
+Requires-Dist: PySocks !=1.5.7,<2.0,>=1.5.6 ; extra == 'socks'
urllib3 is a powerful, *user-friendly* HTTP client for Python. Much of the
@@ -78,8 +83,10 @@ urllib3 can be installed with `pip <https://pip.pypa.io>`_::
Alternatively, you can grab the latest source code from `GitHub <https://github.com/urllib3/urllib3>`_::
- $ git clone git://github.com/urllib3/urllib3.git
- $ python setup.py install
+ $ git clone https://github.com/urllib3/urllib3.git
+ $ cd urllib3
+ $ git checkout 1.26.x
+ $ pip install .
Documentation
@@ -148,6 +155,152 @@ For Enterprise
Changes
=======
+1.26.17 (2023-10-02)
+--------------------
+
+* Added the ``Cookie`` header to the list of headers to strip from requests when redirecting to a different host. As before, different headers can be set via ``Retry.remove_headers_on_redirect``.
+
+
+1.26.16 (2023-05-23)
+--------------------
+
+* Fixed thread-safety issue where accessing a ``PoolManager`` with many distinct origins
+ would cause connection pools to be closed while requests are in progress (`#2954 <https://github.com/urllib3/urllib3/pull/2954>`_)
+
+
+1.26.15 (2023-03-10)
+--------------------
+
+* Fix socket timeout value when ``HTTPConnection`` is reused (`#2645 <https://github.com/urllib3/urllib3/issues/2645>`__)
+* Remove "!" character from the unreserved characters in IPv6 Zone ID parsing
+ (`#2899 <https://github.com/urllib3/urllib3/issues/2899>`__)
+* Fix IDNA handling of '\x80' byte (`#2901 <https://github.com/urllib3/urllib3/issues/2901>`__)
+
+1.26.14 (2023-01-11)
+--------------------
+
+* Fixed parsing of port 0 (zero) returning None, instead of 0. (`#2850 <https://github.com/urllib3/urllib3/issues/2850>`__)
+* Removed deprecated getheaders() calls in contrib module.
+
+1.26.13 (2022-11-23)
+--------------------
+
+* Deprecated the ``HTTPResponse.getheaders()`` and ``HTTPResponse.getheader()`` methods.
+* Fixed an issue where parsing a URL with leading zeroes in the port would be rejected
+ even when the port number after removing the zeroes was valid.
+* Fixed a deprecation warning when using cryptography v39.0.0.
+* Removed the ``<4`` in the ``Requires-Python`` packaging metadata field.
+
+
+1.26.12 (2022-08-22)
+--------------------
+
+* Deprecated the `urllib3[secure]` extra and the `urllib3.contrib.pyopenssl` module.
+ Both will be removed in v2.x. See this `GitHub issue <https://github.com/urllib3/urllib3/issues/2680>`_
+ for justification and info on how to migrate.
+
+
+1.26.11 (2022-07-25)
+--------------------
+
+* Fixed an issue where reading more than 2 GiB in a call to ``HTTPResponse.read`` would
+ raise an ``OverflowError`` on Python 3.9 and earlier.
+
+
+1.26.10 (2022-07-07)
+--------------------
+
+* Removed support for Python 3.5
+* Fixed an issue where a ``ProxyError`` recommending configuring the proxy as HTTP
+ instead of HTTPS could appear even when an HTTPS proxy wasn't configured.
+
+
+1.26.9 (2022-03-16)
+-------------------
+
+* Changed ``urllib3[brotli]`` extra to favor installing Brotli libraries that are still
+ receiving updates like ``brotli`` and ``brotlicffi`` instead of ``brotlipy``.
+ This change does not impact behavior of urllib3, only which dependencies are installed.
+* Fixed a socket leaking when ``HTTPSConnection.connect()`` raises an exception.
+* Fixed ``server_hostname`` being forwarded from ``PoolManager`` to ``HTTPConnectionPool``
+ when requesting an HTTP URL. Should only be forwarded when requesting an HTTPS URL.
+
+
+1.26.8 (2022-01-07)
+-------------------
+
+* Added extra message to ``urllib3.exceptions.ProxyError`` when urllib3 detects that
+ a proxy is configured to use HTTPS but the proxy itself appears to only use HTTP.
+* Added a mention of the size of the connection pool when discarding a connection due to the pool being full.
+* Added explicit support for Python 3.11.
+* Deprecated the ``Retry.MAX_BACKOFF`` class property in favor of ``Retry.DEFAULT_MAX_BACKOFF``
+ to better match the rest of the default parameter names. ``Retry.MAX_BACKOFF`` is removed in v2.0.
+* Changed location of the vendored ``ssl.match_hostname`` function from ``urllib3.packages.ssl_match_hostname``
+ to ``urllib3.util.ssl_match_hostname`` to ensure Python 3.10+ compatibility after being repackaged
+ by downstream distributors.
+* Fixed absolute imports, all imports are now relative.
+
+
+1.26.7 (2021-09-22)
+-------------------
+
+* Fixed a bug with HTTPS hostname verification involving IP addresses and lack
+ of SNI. (Issue #2400)
+* Fixed a bug where IPv6 braces weren't stripped during certificate hostname
+ matching. (Issue #2240)
+
+
+1.26.6 (2021-06-25)
+-------------------
+
+* Deprecated the ``urllib3.contrib.ntlmpool`` module. urllib3 is not able to support
+ it properly due to `reasons listed in this issue <https://github.com/urllib3/urllib3/issues/2282>`_.
+ If you are a user of this module please leave a comment.
+* Changed ``HTTPConnection.request_chunked()`` to not erroneously emit multiple
+ ``Transfer-Encoding`` headers in the case that one is already specified.
+* Fixed typo in deprecation message to recommend ``Retry.DEFAULT_ALLOWED_METHODS``.
+
+
+1.26.5 (2021-05-26)
+-------------------
+
+* Fixed deprecation warnings emitted in Python 3.10.
+* Updated vendored ``six`` library to 1.16.0.
+* Improved performance of URL parser when splitting
+ the authority component.
+
+
+1.26.4 (2021-03-15)
+-------------------
+
+* Changed behavior of the default ``SSLContext`` when connecting to HTTPS proxy
+ during HTTPS requests. The default ``SSLContext`` now sets ``check_hostname=True``.
+
+
+1.26.3 (2021-01-26)
+-------------------
+
+* Fixed bytes and string comparison issue with headers (Pull #2141)
+
+* Changed ``ProxySchemeUnknown`` error message to be
+ more actionable if the user supplies a proxy URL without
+ a scheme. (Pull #2107)
+
+
+1.26.2 (2020-11-12)
+-------------------
+
+* Fixed an issue where ``wrap_socket`` and ``CERT_REQUIRED`` wouldn't
+ be imported properly on Python 2.7.8 and earlier (Pull #2052)
+
+
+1.26.1 (2020-11-11)
+-------------------
+
+* Fixed an issue where two ``User-Agent`` headers would be sent if a
+ ``User-Agent`` header key is passed as ``bytes`` (Pull #2047)
+
+
1.26.0 (2020-11-10)
-------------------
@@ -1331,5 +1484,3 @@ Changes
----------------
* First release.
-
-
diff --git a/third_party/python/urllib3/urllib3-1.26.17.dist-info/RECORD b/third_party/python/urllib3/urllib3-1.26.17.dist-info/RECORD
new file mode 100644
index 0000000000..1afc658058
--- /dev/null
+++ b/third_party/python/urllib3/urllib3-1.26.17.dist-info/RECORD
@@ -0,0 +1,44 @@
+urllib3/__init__.py,sha256=iXLcYiJySn0GNbWOOZDDApgBL1JgP44EZ8i1760S8Mc,3333
+urllib3/_collections.py,sha256=Rp1mVyBgc_UlAcp6M3at1skJBXR5J43NawRTvW2g_XY,10811
+urllib3/_version.py,sha256=azoM7M7BUADl2kBhMVR6PPf2GhBDI90me1fcnzTwdcw,64
+urllib3/connection.py,sha256=92k9td_y4PEiTIjNufCUa1NzMB3J3w0LEdyokYgXnW8,20300
+urllib3/connectionpool.py,sha256=ItVDasDnPRPP9R8bNxY7tPBlC724nJ9nlxVgXG_SLbI,39990
+urllib3/exceptions.py,sha256=0Mnno3KHTNfXRfY7638NufOPkUb6mXOm-Lqj-4x2w8A,8217
+urllib3/fields.py,sha256=kvLDCg_JmH1lLjUUEY_FLS8UhY7hBvDPuVETbY8mdrM,8579
+urllib3/filepost.py,sha256=5b_qqgRHVlL7uLtdAYBzBh-GHmU5AfJVt_2N0XS3PeY,2440
+urllib3/poolmanager.py,sha256=0i8cJgrqupza67IBPZ_u9jXvnSxr5UBlVEiUqdkPtYI,19752
+urllib3/request.py,sha256=YTWFNr7QIwh7E1W9dde9LM77v2VWTJ5V78XuTTw7D1A,6691
+urllib3/response.py,sha256=UPgLmnHj4z71ZnH8ivYOyncATifTOw9FQukUqDnckCc,30761
+urllib3/contrib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+urllib3/contrib/_appengine_environ.py,sha256=bDbyOEhW2CKLJcQqAKAyrEHN-aklsyHFKq6vF8ZFsmk,957
+urllib3/contrib/appengine.py,sha256=6IBW6lPOoVUxASPwtn6IH1AATe5DK3lLJCfwyWlLKAE,11012
+urllib3/contrib/ntlmpool.py,sha256=NlfkW7WMdW8ziqudopjHoW299og1BTWi0IeIibquFwk,4528
+urllib3/contrib/pyopenssl.py,sha256=4AJAlo9NmjWofY4dJwRa4kbZuRuHfNJxu8Pv6yQk1ss,17055
+urllib3/contrib/securetransport.py,sha256=QOhVbWrFQTKbmV-vtyG69amekkKVxXkdjk9oymaO0Ag,34416
+urllib3/contrib/socks.py,sha256=aRi9eWXo9ZEb95XUxef4Z21CFlnnjbEiAo9HOseoMt4,7097
+urllib3/contrib/_securetransport/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+urllib3/contrib/_securetransport/bindings.py,sha256=4Xk64qIkPBt09A5q-RIFUuDhNc9mXilVapm7WnYnzRw,17632
+urllib3/contrib/_securetransport/low_level.py,sha256=B2JBB2_NRP02xK6DCa1Pa9IuxrPwxzDzZbixQkb7U9M,13922
+urllib3/packages/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+urllib3/packages/six.py,sha256=b9LM0wBXv7E7SrbCjAm4wwN-hrH-iNxv18LgWNMMKPo,34665
+urllib3/packages/backports/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+urllib3/packages/backports/makefile.py,sha256=nbzt3i0agPVP07jqqgjhaYjMmuAi_W5E0EywZivVO8E,1417
+urllib3/packages/backports/weakref_finalize.py,sha256=tRCal5OAhNSRyb0DhHp-38AtIlCsRP8BxF3NX-6rqIA,5343
+urllib3/util/__init__.py,sha256=JEmSmmqqLyaw8P51gUImZh8Gwg9i1zSe-DoqAitn2nc,1155
+urllib3/util/connection.py,sha256=5Lx2B1PW29KxBn2T0xkN1CBgRBa3gGVJBKoQoRogEVk,4901
+urllib3/util/proxy.py,sha256=zUvPPCJrp6dOF0N4GAVbOcl6o-4uXKSrGiTkkr5vUS4,1605
+urllib3/util/queue.py,sha256=nRgX8_eX-_VkvxoX096QWoz8Ps0QHUAExILCY_7PncM,498
+urllib3/util/request.py,sha256=fWiAaa8pwdLLIqoTLBxCC2e4ed80muzKU3e3HWWTzFQ,4225
+urllib3/util/response.py,sha256=GJpg3Egi9qaJXRwBh5wv-MNuRWan5BIu40oReoxWP28,3510
+urllib3/util/retry.py,sha256=Z6WEf518eTOXP5jr5QSQ9gqJI0DVYt3Xs3EKnYaTmus,22013
+urllib3/util/ssl_.py,sha256=c0sYiSC6272r6uPkxQpo5rYPP9QC1eR6oI7004gYqZo,17165
+urllib3/util/ssl_match_hostname.py,sha256=Ir4cZVEjmAk8gUAIHWSi7wtOO83UCYABY2xFD1Ql_WA,5758
+urllib3/util/ssltransport.py,sha256=NA-u5rMTrDFDFC8QzRKUEKMG0561hOD4qBTr3Z4pv6E,6895
+urllib3/util/timeout.py,sha256=cwq4dMk87mJHSBktK1miYJ-85G-3T3RmT20v7SFCpno,10168
+urllib3/util/url.py,sha256=kMxL1k0d-aQm_iZDw_zMmnyYyjrIA_DbsMy3cm3V55M,14279
+urllib3/util/wait.py,sha256=fOX0_faozG2P7iVojQoE1mbydweNyTcm-hXEfFrTtLI,5403
+urllib3-1.26.17.dist-info/LICENSE.txt,sha256=w3vxhuJ8-dvpYZ5V7f486nswCRzrPaY8fay-Dm13kHs,1115
+urllib3-1.26.17.dist-info/METADATA,sha256=swEiQKmb2m5Vl4fygmy4aLSzZjxDjD8q2-_XzuhO9pA,48743
+urllib3-1.26.17.dist-info/WHEEL,sha256=iYlv5fX357PQyRT2o6tw1bN-YcKFFHKqB_LwHO5wP-g,110
+urllib3-1.26.17.dist-info/top_level.txt,sha256=EMiXL2sKrTcmrMxIHTqdc3ET54pQI2Y072LexFEemvo,8
+urllib3-1.26.17.dist-info/RECORD,,
diff --git a/third_party/python/urllib3/urllib3-1.26.0.dist-info/WHEEL b/third_party/python/urllib3/urllib3-1.26.17.dist-info/WHEEL
index 6d38aa0601..c34f1162ef 100644
--- a/third_party/python/urllib3/urllib3-1.26.0.dist-info/WHEEL
+++ b/third_party/python/urllib3/urllib3-1.26.17.dist-info/WHEEL
@@ -1,5 +1,5 @@
Wheel-Version: 1.0
-Generator: bdist_wheel (0.35.1)
+Generator: bdist_wheel (0.41.2)
Root-Is-Purelib: true
Tag: py2-none-any
Tag: py3-none-any
diff --git a/third_party/python/urllib3/urllib3-1.26.0.dist-info/top_level.txt b/third_party/python/urllib3/urllib3-1.26.17.dist-info/top_level.txt
index a42590bebe..a42590bebe 100644
--- a/third_party/python/urllib3/urllib3-1.26.0.dist-info/top_level.txt
+++ b/third_party/python/urllib3/urllib3-1.26.17.dist-info/top_level.txt
diff --git a/third_party/python/urllib3/urllib3/__init__.py b/third_party/python/urllib3/urllib3/__init__.py
index fe86b59d78..c6fa38212f 100644
--- a/third_party/python/urllib3/urllib3/__init__.py
+++ b/third_party/python/urllib3/urllib3/__init__.py
@@ -19,6 +19,23 @@ from .util.retry import Retry
from .util.timeout import Timeout
from .util.url import get_host
+# === NOTE TO REPACKAGERS AND VENDORS ===
+# Please delete this block, this logic is only
+# for urllib3 being distributed via PyPI.
+# See: https://github.com/urllib3/urllib3/issues/2680
+try:
+ import urllib3_secure_extra # type: ignore # noqa: F401
+except ImportError:
+ pass
+else:
+ warnings.warn(
+ "'urllib3[secure]' extra is deprecated and will be removed "
+ "in a future release of urllib3 2.x. Read more in this issue: "
+ "https://github.com/urllib3/urllib3/issues/2680",
+ category=DeprecationWarning,
+ stacklevel=2,
+ )
+
__author__ = "Andrey Petrov (andrey.petrov@shazow.net)"
__license__ = "MIT"
__version__ = __version__
diff --git a/third_party/python/urllib3/urllib3/_version.py b/third_party/python/urllib3/urllib3/_version.py
index cee465f88a..cad75fb5df 100644
--- a/third_party/python/urllib3/urllib3/_version.py
+++ b/third_party/python/urllib3/urllib3/_version.py
@@ -1,2 +1,2 @@
# This file is protected via CODEOWNERS
-__version__ = "1.26.0"
+__version__ = "1.26.17"
diff --git a/third_party/python/urllib3/urllib3/connection.py b/third_party/python/urllib3/urllib3/connection.py
index 52487417c9..54b96b1915 100644
--- a/third_party/python/urllib3/urllib3/connection.py
+++ b/third_party/python/urllib3/urllib3/connection.py
@@ -43,6 +43,7 @@ except NameError: # Python 2:
pass
+from ._collections import HTTPHeaderDict # noqa (historical, removed in v2)
from ._version import __version__
from .exceptions import (
ConnectTimeoutError,
@@ -50,15 +51,16 @@ from .exceptions import (
SubjectAltNameWarning,
SystemTimeWarning,
)
-from .packages.ssl_match_hostname import CertificateError, match_hostname
from .util import SKIP_HEADER, SKIPPABLE_HEADERS, connection
from .util.ssl_ import (
assert_fingerprint,
create_urllib3_context,
+ is_ipaddress,
resolve_cert_reqs,
resolve_ssl_version,
ssl_wrap_socket,
)
+from .util.ssl_match_hostname import CertificateError, match_hostname
log = logging.getLogger(__name__)
@@ -66,7 +68,7 @@ port_by_scheme = {"http": 80, "https": 443}
# When it comes time to update this value as a part of regular maintenance
# (ie test_recent_date is failing) update it to ~6 months before the current date.
-RECENT_DATE = datetime.date(2019, 1, 1)
+RECENT_DATE = datetime.date(2022, 1, 1)
_CONTAINS_CONTROL_CHAR_RE = re.compile(r"[^-!#$%&'*+.^_`|~0-9a-zA-Z]")
@@ -106,6 +108,10 @@ class HTTPConnection(_HTTPConnection, object):
#: Whether this connection verifies the host's certificate.
is_verified = False
+ #: Whether this proxy connection (if used) verifies the proxy host's
+ #: certificate.
+ proxy_is_verified = None
+
def __init__(self, *args, **kw):
if not six.PY2:
kw.pop("strict", None)
@@ -200,7 +206,7 @@ class HTTPConnection(_HTTPConnection, object):
self._prepare_conn(conn)
def putrequest(self, method, url, *args, **kwargs):
- """"""
+ """ """
# Empty docstring because the indentation of CPython's implementation
# is broken but we don't want this method in our documentation.
match = _CONTAINS_CONTROL_CHAR_RE.search(method)
@@ -213,8 +219,8 @@ class HTTPConnection(_HTTPConnection, object):
return _HTTPConnection.putrequest(self, method, url, *args, **kwargs)
def putheader(self, header, *values):
- """"""
- if SKIP_HEADER not in values:
+ """ """
+ if not any(isinstance(v, str) and v == SKIP_HEADER for v in values):
_HTTPConnection.putheader(self, header, *values)
elif six.ensure_str(header.lower()) not in SKIPPABLE_HEADERS:
raise ValueError(
@@ -223,12 +229,17 @@ class HTTPConnection(_HTTPConnection, object):
)
def request(self, method, url, body=None, headers=None):
+ # Update the inner socket's timeout value to send the request.
+ # This only triggers if the connection is re-used.
+ if getattr(self, "sock", None) is not None:
+ self.sock.settimeout(self.timeout)
+
if headers is None:
headers = {}
else:
# Avoid modifying the headers passed into .request()
headers = headers.copy()
- if "user-agent" not in (k.lower() for k in headers):
+ if "user-agent" not in (six.ensure_str(k.lower()) for k in headers):
headers["User-Agent"] = _get_default_user_agent()
super(HTTPConnection, self).request(method, url, body=body, headers=headers)
@@ -248,7 +259,7 @@ class HTTPConnection(_HTTPConnection, object):
self.putheader("User-Agent", _get_default_user_agent())
for header, value in headers.items():
self.putheader(header, value)
- if "transfer-encoding" not in headers:
+ if "transfer-encoding" not in header_keys:
self.putheader("Transfer-Encoding", "chunked")
self.endheaders()
@@ -349,17 +360,15 @@ class HTTPSConnection(HTTPConnection):
def connect(self):
# Add certificate verification
- conn = self._new_conn()
+ self.sock = conn = self._new_conn()
hostname = self.host
tls_in_tls = False
if self._is_using_tunnel():
if self.tls_in_tls_required:
- conn = self._connect_tls_proxy(hostname, conn)
+ self.sock = conn = self._connect_tls_proxy(hostname, conn)
tls_in_tls = True
- self.sock = conn
-
# Calls self._set_hostport(), so self.host is
# self._tunnel_host below.
self._tunnel()
@@ -492,7 +501,7 @@ class HTTPSConnection(HTTPConnection):
# If no cert was provided, use only the default options for server
# certificate validation
- return ssl_wrap_socket(
+ socket = ssl_wrap_socket(
sock=conn,
ca_certs=self.ca_certs,
ca_cert_dir=self.ca_cert_dir,
@@ -501,8 +510,37 @@ class HTTPSConnection(HTTPConnection):
ssl_context=ssl_context,
)
+ if ssl_context.verify_mode != ssl.CERT_NONE and not getattr(
+ ssl_context, "check_hostname", False
+ ):
+ # While urllib3 attempts to always turn off hostname matching from
+ # the TLS library, this cannot always be done. So we check whether
+ # the TLS Library still thinks it's matching hostnames.
+ cert = socket.getpeercert()
+ if not cert.get("subjectAltName", ()):
+ warnings.warn(
+ (
+ "Certificate for {0} has no `subjectAltName`, falling back to check for a "
+ "`commonName` for now. This feature is being removed by major browsers and "
+ "deprecated by RFC 2818. (See https://github.com/urllib3/urllib3/issues/497 "
+ "for details.)".format(hostname)
+ ),
+ SubjectAltNameWarning,
+ )
+ _match_hostname(cert, hostname)
+
+ self.proxy_is_verified = ssl_context.verify_mode == ssl.CERT_REQUIRED
+ return socket
+
def _match_hostname(cert, asserted_hostname):
+ # Our upstream implementation of ssl.match_hostname()
+ # only applies this normalization to IP addresses so it doesn't
+ # match DNS SANs so we do the same thing!
+ stripped_hostname = asserted_hostname.strip("u[]")
+ if is_ipaddress(stripped_hostname):
+ asserted_hostname = stripped_hostname
+
try:
match_hostname(cert, asserted_hostname)
except CertificateError as e:
diff --git a/third_party/python/urllib3/urllib3/connectionpool.py b/third_party/python/urllib3/urllib3/connectionpool.py
index 4708c5bfc7..96844d9337 100644
--- a/third_party/python/urllib3/urllib3/connectionpool.py
+++ b/third_party/python/urllib3/urllib3/connectionpool.py
@@ -2,6 +2,7 @@ from __future__ import absolute_import
import errno
import logging
+import re
import socket
import sys
import warnings
@@ -35,7 +36,6 @@ from .exceptions import (
)
from .packages import six
from .packages.six.moves import queue
-from .packages.ssl_match_hostname import CertificateError
from .request import RequestMethods
from .response import HTTPResponse
from .util.connection import is_connection_dropped
@@ -44,11 +44,19 @@ from .util.queue import LifoQueue
from .util.request import set_file_position
from .util.response import assert_header_parsing
from .util.retry import Retry
+from .util.ssl_match_hostname import CertificateError
from .util.timeout import Timeout
from .util.url import Url, _encode_target
from .util.url import _normalize_host as normalize_host
from .util.url import get_host, parse_url
+try: # Platform-specific: Python 3
+ import weakref
+
+ weakref_finalize = weakref.finalize
+except AttributeError: # Platform-specific: Python 2
+ from .packages.backports.weakref_finalize import weakref_finalize
+
xrange = six.moves.xrange
log = logging.getLogger(__name__)
@@ -219,6 +227,16 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
self.conn_kw["proxy"] = self.proxy
self.conn_kw["proxy_config"] = self.proxy_config
+ # Do not pass 'self' as callback to 'finalize'.
+ # Then the 'finalize' would keep an endless living (leak) to self.
+ # By just passing a reference to the pool allows the garbage collector
+ # to free self if nobody else has a reference to it.
+ pool = self.pool
+
+ # Close all the HTTPConnections in the pool before the
+ # HTTPConnectionPool object is garbage collected.
+ weakref_finalize(self, _close_pool_connections, pool)
+
def _new_conn(self):
"""
Return a fresh :class:`HTTPConnection`.
@@ -301,8 +319,11 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
pass
except queue.Full:
# This should never happen if self.block == True
- log.warning("Connection pool is full, discarding connection: %s", self.host)
-
+ log.warning(
+ "Connection pool is full, discarding connection: %s. Connection pool size: %s",
+ self.host,
+ self.pool.qsize(),
+ )
# Connection never got put back into the pool, close it.
if conn:
conn.close()
@@ -318,7 +339,7 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
pass
def _get_timeout(self, timeout):
- """ Helper that always returns a :class:`urllib3.util.Timeout` """
+ """Helper that always returns a :class:`urllib3.util.Timeout`"""
if timeout is _Default:
return self.timeout.clone()
@@ -375,7 +396,7 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
timeout_obj = self._get_timeout(timeout)
timeout_obj.start_connect()
- conn.timeout = timeout_obj.connect_timeout
+ conn.timeout = Timeout.resolve_default_timeout(timeout_obj.connect_timeout)
# Trigger any extra validation we need to do.
try:
@@ -485,14 +506,8 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
# Disable access to the pool
old_pool, self.pool = self.pool, None
- try:
- while True:
- conn = old_pool.get(block=False)
- if conn:
- conn.close()
-
- except queue.Empty:
- pass # Done.
+ # Close all the HTTPConnections in the pool.
+ _close_pool_connections(old_pool)
def is_same_host(self, url):
"""
@@ -745,7 +760,35 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
# Discard the connection for these exceptions. It will be
# replaced during the next _get_conn() call.
clean_exit = False
- if isinstance(e, (BaseSSLError, CertificateError)):
+
+ def _is_ssl_error_message_from_http_proxy(ssl_error):
+ # We're trying to detect the message 'WRONG_VERSION_NUMBER' but
+ # SSLErrors are kinda all over the place when it comes to the message,
+ # so we try to cover our bases here!
+ message = " ".join(re.split("[^a-z]", str(ssl_error).lower()))
+ return (
+ "wrong version number" in message or "unknown protocol" in message
+ )
+
+ # Try to detect a common user error with proxies which is to
+ # set an HTTP proxy to be HTTPS when it should be 'http://'
+ # (ie {'http': 'http://proxy', 'https': 'https://proxy'})
+ # Instead we add a nice error message and point to a URL.
+ if (
+ isinstance(e, BaseSSLError)
+ and self.proxy
+ and _is_ssl_error_message_from_http_proxy(e)
+ and conn.proxy
+ and conn.proxy.scheme == "https"
+ ):
+ e = ProxyError(
+ "Your proxy appears to only use HTTP and not HTTPS, "
+ "try changing your proxy URL to be HTTP. See: "
+ "https://urllib3.readthedocs.io/en/1.26.x/advanced-usage.html"
+ "#https-proxy-error-http-proxy",
+ SSLError(e),
+ )
+ elif isinstance(e, (BaseSSLError, CertificateError)):
e = SSLError(e)
elif isinstance(e, (SocketError, NewConnectionError)) and self.proxy:
e = ProxyError("Cannot connect to proxy.", e)
@@ -830,7 +873,7 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
)
# Check if we should retry the HTTP response.
- has_retry_after = bool(response.getheader("Retry-After"))
+ has_retry_after = bool(response.headers.get("Retry-After"))
if retries.is_retry(method, response.status, has_retry_after):
try:
retries = retries.increment(method, url, response=response, _pool=self)
@@ -1014,12 +1057,23 @@ class HTTPSConnectionPool(HTTPConnectionPool):
(
"Unverified HTTPS request is being made to host '%s'. "
"Adding certificate verification is strongly advised. See: "
- "https://urllib3.readthedocs.io/en/latest/advanced-usage.html"
+ "https://urllib3.readthedocs.io/en/1.26.x/advanced-usage.html"
"#ssl-warnings" % conn.host
),
InsecureRequestWarning,
)
+ if getattr(conn, "proxy_is_verified", None) is False:
+ warnings.warn(
+ (
+ "Unverified HTTPS connection done to an HTTPS proxy. "
+ "Adding certificate verification is strongly advised. See: "
+ "https://urllib3.readthedocs.io/en/1.26.x/advanced-usage.html"
+ "#ssl-warnings"
+ ),
+ InsecureRequestWarning,
+ )
+
def connection_from_url(url, **kw):
"""
@@ -1065,3 +1119,14 @@ def _normalize_host(host, scheme):
if host.startswith("[") and host.endswith("]"):
host = host[1:-1]
return host
+
+
+def _close_pool_connections(pool):
+ """Drains a queue of connections and closes each one."""
+ try:
+ while True:
+ conn = pool.get(block=False)
+ if conn:
+ conn.close()
+ except queue.Empty:
+ pass # Done.
diff --git a/third_party/python/urllib3/urllib3/contrib/_securetransport/bindings.py b/third_party/python/urllib3/urllib3/contrib/_securetransport/bindings.py
index 11524d400b..264d564dbd 100644
--- a/third_party/python/urllib3/urllib3/contrib/_securetransport/bindings.py
+++ b/third_party/python/urllib3/urllib3/contrib/_securetransport/bindings.py
@@ -48,7 +48,7 @@ from ctypes import (
)
from ctypes.util import find_library
-from urllib3.packages.six import raise_from
+from ...packages.six import raise_from
if platform.system() != "Darwin":
raise ImportError("Only macOS is supported")
diff --git a/third_party/python/urllib3/urllib3/contrib/_securetransport/low_level.py b/third_party/python/urllib3/urllib3/contrib/_securetransport/low_level.py
index ed8120190c..fa0b245d27 100644
--- a/third_party/python/urllib3/urllib3/contrib/_securetransport/low_level.py
+++ b/third_party/python/urllib3/urllib3/contrib/_securetransport/low_level.py
@@ -188,6 +188,7 @@ def _cert_array_from_pem(pem_bundle):
# We only want to do that if an error occurs: otherwise, the caller
# should free.
CoreFoundation.CFRelease(cert_array)
+ raise
return cert_array
diff --git a/third_party/python/urllib3/urllib3/contrib/appengine.py b/third_party/python/urllib3/urllib3/contrib/appengine.py
index aa64a0914c..a5a6d91035 100644
--- a/third_party/python/urllib3/urllib3/contrib/appengine.py
+++ b/third_party/python/urllib3/urllib3/contrib/appengine.py
@@ -111,7 +111,7 @@ class AppEngineManager(RequestMethods):
warnings.warn(
"urllib3 is using URLFetch on Google App Engine sandbox instead "
"of sockets. To use sockets directly instead of URLFetch see "
- "https://urllib3.readthedocs.io/en/latest/reference/urllib3.contrib.html.",
+ "https://urllib3.readthedocs.io/en/1.26.x/reference/urllib3.contrib.html.",
AppEnginePlatformWarning,
)
@@ -224,7 +224,7 @@ class AppEngineManager(RequestMethods):
)
# Check if we should retry the HTTP response.
- has_retry_after = bool(http_response.getheader("Retry-After"))
+ has_retry_after = bool(http_response.headers.get("Retry-After"))
if retries.is_retry(method, http_response.status, has_retry_after):
retries = retries.increment(method, url, response=http_response, _pool=self)
log.debug("Retry: %s", url)
diff --git a/third_party/python/urllib3/urllib3/contrib/ntlmpool.py b/third_party/python/urllib3/urllib3/contrib/ntlmpool.py
index b2df45dcf6..471665754e 100644
--- a/third_party/python/urllib3/urllib3/contrib/ntlmpool.py
+++ b/third_party/python/urllib3/urllib3/contrib/ntlmpool.py
@@ -5,6 +5,7 @@ Issue #10, see: http://code.google.com/p/urllib3/issues/detail?id=10
"""
from __future__ import absolute_import
+import warnings
from logging import getLogger
from ntlm import ntlm
@@ -12,6 +13,14 @@ from ntlm import ntlm
from .. import HTTPSConnectionPool
from ..packages.six.moves.http_client import HTTPSConnection
+warnings.warn(
+ "The 'urllib3.contrib.ntlmpool' module is deprecated and will be removed "
+ "in urllib3 v2.0 release, urllib3 is not able to support it properly due "
+ "to reasons listed in issue: https://github.com/urllib3/urllib3/issues/2282. "
+ "If you are a user of this module please comment in the mentioned issue.",
+ DeprecationWarning,
+)
+
log = getLogger(__name__)
@@ -60,7 +69,7 @@ class NTLMConnectionPool(HTTPSConnectionPool):
log.debug("Request headers: %s", headers)
conn.request("GET", self.authurl, None, headers)
res = conn.getresponse()
- reshdr = dict(res.getheaders())
+ reshdr = dict(res.headers)
log.debug("Response status: %s %s", res.status, res.reason)
log.debug("Response headers: %s", reshdr)
log.debug("Response data: %s [...]", res.read(100))
@@ -92,7 +101,7 @@ class NTLMConnectionPool(HTTPSConnectionPool):
conn.request("GET", self.authurl, None, headers)
res = conn.getresponse()
log.debug("Response status: %s %s", res.status, res.reason)
- log.debug("Response headers: %s", dict(res.getheaders()))
+ log.debug("Response headers: %s", dict(res.headers))
log.debug("Response data: %s [...]", res.read()[:100])
if res.status != 200:
if res.status == 401:
diff --git a/third_party/python/urllib3/urllib3/contrib/pyopenssl.py b/third_party/python/urllib3/urllib3/contrib/pyopenssl.py
index 0cabab1aed..1ed214b1d7 100644
--- a/third_party/python/urllib3/urllib3/contrib/pyopenssl.py
+++ b/third_party/python/urllib3/urllib3/contrib/pyopenssl.py
@@ -47,10 +47,10 @@ compression in Python 2 (see `CRIME attack`_).
"""
from __future__ import absolute_import
+import OpenSSL.crypto
import OpenSSL.SSL
from cryptography import x509
from cryptography.hazmat.backends.openssl import backend as openssl_backend
-from cryptography.hazmat.backends.openssl.x509 import _Certificate
try:
from cryptography.x509 import UnsupportedExtension
@@ -73,9 +73,19 @@ except ImportError: # Platform-specific: Python 3
import logging
import ssl
import sys
+import warnings
from .. import util
from ..packages import six
+from ..util.ssl_ import PROTOCOL_TLS_CLIENT
+
+warnings.warn(
+ "'urllib3.contrib.pyopenssl' module is deprecated and will be removed "
+ "in a future release of urllib3 2.x. Read more in this issue: "
+ "https://github.com/urllib3/urllib3/issues/2680",
+ category=DeprecationWarning,
+ stacklevel=2,
+)
__all__ = ["inject_into_urllib3", "extract_from_urllib3"]
@@ -85,6 +95,7 @@ HAS_SNI = True
# Map from urllib3 to PyOpenSSL compatible parameter-values.
_openssl_versions = {
util.PROTOCOL_TLS: OpenSSL.SSL.SSLv23_METHOD,
+ PROTOCOL_TLS_CLIENT: OpenSSL.SSL.SSLv23_METHOD,
ssl.PROTOCOL_TLSv1: OpenSSL.SSL.TLSv1_METHOD,
}
@@ -217,9 +228,8 @@ def get_subj_alt_name(peer_cert):
if hasattr(peer_cert, "to_cryptography"):
cert = peer_cert.to_cryptography()
else:
- # This is technically using private APIs, but should work across all
- # relevant versions before PyOpenSSL got a proper API for this.
- cert = _Certificate(openssl_backend, peer_cert._x509)
+ der = OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_ASN1, peer_cert)
+ cert = x509.load_der_x509_certificate(der, openssl_backend)
# We want to find the SAN extension. Ask Cryptography to locate it (it's
# faster than looping in Python)
@@ -404,7 +414,6 @@ if _fileobject: # Platform-specific: Python 2
self._makefile_refs += 1
return _fileobject(self, mode, bufsize, close=True)
-
else: # Platform-specific: Python 3
makefile = backport_makefile
diff --git a/third_party/python/urllib3/urllib3/contrib/securetransport.py b/third_party/python/urllib3/urllib3/contrib/securetransport.py
index ab092de67a..6c46a3b9f0 100644
--- a/third_party/python/urllib3/urllib3/contrib/securetransport.py
+++ b/third_party/python/urllib3/urllib3/contrib/securetransport.py
@@ -67,6 +67,7 @@ import weakref
import six
from .. import util
+from ..util.ssl_ import PROTOCOL_TLS_CLIENT
from ._securetransport.bindings import CoreFoundation, Security, SecurityConst
from ._securetransport.low_level import (
_assert_no_error,
@@ -154,7 +155,8 @@ CIPHER_SUITES = [
# TLSv1 and a high of TLSv1.2. For everything else, we pin to that version.
# TLSv1 to 1.2 are supported on macOS 10.8+
_protocol_to_min_max = {
- util.PROTOCOL_TLS: (SecurityConst.kTLSProtocol1, SecurityConst.kTLSProtocol12)
+ util.PROTOCOL_TLS: (SecurityConst.kTLSProtocol1, SecurityConst.kTLSProtocol12),
+ PROTOCOL_TLS_CLIENT: (SecurityConst.kTLSProtocol1, SecurityConst.kTLSProtocol12),
}
if hasattr(ssl, "PROTOCOL_SSLv2"):
@@ -768,7 +770,6 @@ if _fileobject: # Platform-specific: Python 2
self._makefile_refs += 1
return _fileobject(self, mode, bufsize, close=True)
-
else: # Platform-specific: Python 3
def makefile(self, mode="r", buffering=None, *args, **kwargs):
diff --git a/third_party/python/urllib3/urllib3/contrib/socks.py b/third_party/python/urllib3/urllib3/contrib/socks.py
index 93df8325d5..c326e80dd1 100644
--- a/third_party/python/urllib3/urllib3/contrib/socks.py
+++ b/third_party/python/urllib3/urllib3/contrib/socks.py
@@ -51,7 +51,7 @@ except ImportError:
(
"SOCKS support in urllib3 requires the installation of optional "
"dependencies: specifically, PySocks. For more information, see "
- "https://urllib3.readthedocs.io/en/latest/contrib.html#socks-proxies"
+ "https://urllib3.readthedocs.io/en/1.26.x/contrib.html#socks-proxies"
),
DependencyWarning,
)
diff --git a/third_party/python/urllib3/urllib3/exceptions.py b/third_party/python/urllib3/urllib3/exceptions.py
index d69958d5df..cba6f3f560 100644
--- a/third_party/python/urllib3/urllib3/exceptions.py
+++ b/third_party/python/urllib3/urllib3/exceptions.py
@@ -289,7 +289,17 @@ class ProxySchemeUnknown(AssertionError, URLSchemeUnknown):
# TODO(t-8ch): Stop inheriting from AssertionError in v2.0.
def __init__(self, scheme):
- message = "Not supported proxy scheme %s" % scheme
+ # 'localhost' is here because our URL parser parses
+ # localhost:8080 -> scheme=localhost, remove if we fix this.
+ if scheme == "localhost":
+ scheme = None
+ if scheme is None:
+ message = "Proxy URL had no scheme, should start with http:// or https://"
+ else:
+ message = (
+ "Proxy URL had unsupported scheme %s, should use http:// or https://"
+ % scheme
+ )
super(ProxySchemeUnknown, self).__init__(message)
diff --git a/third_party/python/urllib3/urllib3/packages/__init__.py b/third_party/python/urllib3/urllib3/packages/__init__.py
index fce4caa65d..e69de29bb2 100644
--- a/third_party/python/urllib3/urllib3/packages/__init__.py
+++ b/third_party/python/urllib3/urllib3/packages/__init__.py
@@ -1,5 +0,0 @@
-from __future__ import absolute_import
-
-from . import ssl_match_hostname
-
-__all__ = ("ssl_match_hostname",)
diff --git a/third_party/python/urllib3/urllib3/packages/backports/weakref_finalize.py b/third_party/python/urllib3/urllib3/packages/backports/weakref_finalize.py
new file mode 100644
index 0000000000..a2f2966e54
--- /dev/null
+++ b/third_party/python/urllib3/urllib3/packages/backports/weakref_finalize.py
@@ -0,0 +1,155 @@
+# -*- coding: utf-8 -*-
+"""
+backports.weakref_finalize
+~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Backports the Python 3 ``weakref.finalize`` method.
+"""
+from __future__ import absolute_import
+
+import itertools
+import sys
+from weakref import ref
+
+__all__ = ["weakref_finalize"]
+
+
+class weakref_finalize(object):
+ """Class for finalization of weakrefable objects
+ finalize(obj, func, *args, **kwargs) returns a callable finalizer
+ object which will be called when obj is garbage collected. The
+ first time the finalizer is called it evaluates func(*args, **kwargs)
+ and returns the result. After this the finalizer is dead, and
+ calling it just returns None.
+ When the program exits any remaining finalizers for which the
+ atexit attribute is true will be run in reverse order of creation.
+ By default atexit is true.
+ """
+
+ # Finalizer objects don't have any state of their own. They are
+ # just used as keys to lookup _Info objects in the registry. This
+ # ensures that they cannot be part of a ref-cycle.
+
+ __slots__ = ()
+ _registry = {}
+ _shutdown = False
+ _index_iter = itertools.count()
+ _dirty = False
+ _registered_with_atexit = False
+
+ class _Info(object):
+ __slots__ = ("weakref", "func", "args", "kwargs", "atexit", "index")
+
+ def __init__(self, obj, func, *args, **kwargs):
+ if not self._registered_with_atexit:
+ # We may register the exit function more than once because
+ # of a thread race, but that is harmless
+ import atexit
+
+ atexit.register(self._exitfunc)
+ weakref_finalize._registered_with_atexit = True
+ info = self._Info()
+ info.weakref = ref(obj, self)
+ info.func = func
+ info.args = args
+ info.kwargs = kwargs or None
+ info.atexit = True
+ info.index = next(self._index_iter)
+ self._registry[self] = info
+ weakref_finalize._dirty = True
+
+ def __call__(self, _=None):
+ """If alive then mark as dead and return func(*args, **kwargs);
+ otherwise return None"""
+ info = self._registry.pop(self, None)
+ if info and not self._shutdown:
+ return info.func(*info.args, **(info.kwargs or {}))
+
+ def detach(self):
+ """If alive then mark as dead and return (obj, func, args, kwargs);
+ otherwise return None"""
+ info = self._registry.get(self)
+ obj = info and info.weakref()
+ if obj is not None and self._registry.pop(self, None):
+ return (obj, info.func, info.args, info.kwargs or {})
+
+ def peek(self):
+ """If alive then return (obj, func, args, kwargs);
+ otherwise return None"""
+ info = self._registry.get(self)
+ obj = info and info.weakref()
+ if obj is not None:
+ return (obj, info.func, info.args, info.kwargs or {})
+
+ @property
+ def alive(self):
+ """Whether finalizer is alive"""
+ return self in self._registry
+
+ @property
+ def atexit(self):
+ """Whether finalizer should be called at exit"""
+ info = self._registry.get(self)
+ return bool(info) and info.atexit
+
+ @atexit.setter
+ def atexit(self, value):
+ info = self._registry.get(self)
+ if info:
+ info.atexit = bool(value)
+
+ def __repr__(self):
+ info = self._registry.get(self)
+ obj = info and info.weakref()
+ if obj is None:
+ return "<%s object at %#x; dead>" % (type(self).__name__, id(self))
+ else:
+ return "<%s object at %#x; for %r at %#x>" % (
+ type(self).__name__,
+ id(self),
+ type(obj).__name__,
+ id(obj),
+ )
+
+ @classmethod
+ def _select_for_exit(cls):
+ # Return live finalizers marked for exit, oldest first
+ L = [(f, i) for (f, i) in cls._registry.items() if i.atexit]
+ L.sort(key=lambda item: item[1].index)
+ return [f for (f, i) in L]
+
+ @classmethod
+ def _exitfunc(cls):
+ # At shutdown invoke finalizers for which atexit is true.
+ # This is called once all other non-daemonic threads have been
+ # joined.
+ reenable_gc = False
+ try:
+ if cls._registry:
+ import gc
+
+ if gc.isenabled():
+ reenable_gc = True
+ gc.disable()
+ pending = None
+ while True:
+ if pending is None or weakref_finalize._dirty:
+ pending = cls._select_for_exit()
+ weakref_finalize._dirty = False
+ if not pending:
+ break
+ f = pending.pop()
+ try:
+ # gc is disabled, so (assuming no daemonic
+ # threads) the following is the only line in
+ # this function which might trigger creation
+ # of a new finalizer
+ f()
+ except Exception:
+ sys.excepthook(*sys.exc_info())
+ assert f not in cls._registry
+ finally:
+ # prevent any more finalizers from executing during shutdown
+ weakref_finalize._shutdown = True
+ if reenable_gc:
+ gc.enable()
diff --git a/third_party/python/urllib3/urllib3/packages/six.py b/third_party/python/urllib3/urllib3/packages/six.py
index 314424099f..f099a3dcd2 100644
--- a/third_party/python/urllib3/urllib3/packages/six.py
+++ b/third_party/python/urllib3/urllib3/packages/six.py
@@ -1,4 +1,4 @@
-# Copyright (c) 2010-2019 Benjamin Peterson
+# Copyright (c) 2010-2020 Benjamin Peterson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
@@ -29,7 +29,7 @@ import sys
import types
__author__ = "Benjamin Peterson <benjamin@python.org>"
-__version__ = "1.12.0"
+__version__ = "1.16.0"
# Useful for very coarse version differentiation.
@@ -71,6 +71,11 @@ else:
MAXSIZE = int((1 << 63) - 1)
del X
+if PY34:
+ from importlib.util import spec_from_loader
+else:
+ spec_from_loader = None
+
def _add_doc(func, doc):
"""Add documentation to a function."""
@@ -182,6 +187,11 @@ class _SixMetaPathImporter(object):
return self
return None
+ def find_spec(self, fullname, path, target=None):
+ if fullname in self.known_modules:
+ return spec_from_loader(fullname, self)
+ return None
+
def __get_module(self, fullname):
try:
return self.known_modules[fullname]
@@ -220,6 +230,12 @@ class _SixMetaPathImporter(object):
get_source = get_code # same as get_code
+ def create_module(self, spec):
+ return self.load_module(spec.name)
+
+ def exec_module(self, module):
+ pass
+
_importer = _SixMetaPathImporter(__name__)
@@ -260,9 +276,19 @@ _moved_attributes = [
),
MovedModule("builtins", "__builtin__"),
MovedModule("configparser", "ConfigParser"),
+ MovedModule(
+ "collections_abc",
+ "collections",
+ "collections.abc" if sys.version_info >= (3, 3) else "collections",
+ ),
MovedModule("copyreg", "copy_reg"),
MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),
- MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"),
+ MovedModule("dbm_ndbm", "dbm", "dbm.ndbm"),
+ MovedModule(
+ "_dummy_thread",
+ "dummy_thread",
+ "_dummy_thread" if sys.version_info < (3, 9) else "_thread",
+ ),
MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
MovedModule("http_cookies", "Cookie", "http.cookies"),
MovedModule("html_entities", "htmlentitydefs", "html.entities"),
@@ -307,7 +333,9 @@ _moved_attributes = [
]
# Add windows specific modules.
if sys.platform == "win32":
- _moved_attributes += [MovedModule("winreg", "_winreg")]
+ _moved_attributes += [
+ MovedModule("winreg", "_winreg"),
+ ]
for attr in _moved_attributes:
setattr(_MovedItems, attr.name, attr)
@@ -476,7 +504,7 @@ class Module_six_moves_urllib_robotparser(_LazyModule):
_urllib_robotparser_moved_attributes = [
- MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser")
+ MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"),
]
for attr in _urllib_robotparser_moved_attributes:
setattr(Module_six_moves_urllib_robotparser, attr.name, attr)
@@ -678,9 +706,11 @@ if PY3:
if sys.version_info[1] <= 1:
_assertRaisesRegex = "assertRaisesRegexp"
_assertRegex = "assertRegexpMatches"
+ _assertNotRegex = "assertNotRegexpMatches"
else:
_assertRaisesRegex = "assertRaisesRegex"
_assertRegex = "assertRegex"
+ _assertNotRegex = "assertNotRegex"
else:
def b(s):
@@ -707,6 +737,7 @@ else:
_assertCountEqual = "assertItemsEqual"
_assertRaisesRegex = "assertRaisesRegexp"
_assertRegex = "assertRegexpMatches"
+ _assertNotRegex = "assertNotRegexpMatches"
_add_doc(b, """Byte literal""")
_add_doc(u, """Text literal""")
@@ -723,6 +754,10 @@ def assertRegex(self, *args, **kwargs):
return getattr(self, _assertRegex)(*args, **kwargs)
+def assertNotRegex(self, *args, **kwargs):
+ return getattr(self, _assertNotRegex)(*args, **kwargs)
+
+
if PY3:
exec_ = getattr(moves.builtins, "exec")
@@ -737,7 +772,6 @@ if PY3:
value = None
tb = None
-
else:
def exec_(_code_, _globs_=None, _locs_=None):
@@ -750,7 +784,7 @@ else:
del frame
elif _locs_ is None:
_locs_ = _globs_
- exec("""exec _code_ in _globs_, _locs_""")
+ exec ("""exec _code_ in _globs_, _locs_""")
exec_(
"""def reraise(tp, value, tb=None):
@@ -762,18 +796,7 @@ else:
)
-if sys.version_info[:2] == (3, 2):
- exec_(
- """def raise_from(value, from_value):
- try:
- if from_value is None:
- raise value
- raise value from from_value
- finally:
- value = None
-"""
- )
-elif sys.version_info[:2] > (3, 2):
+if sys.version_info[:2] > (3,):
exec_(
"""def raise_from(value, from_value):
try:
@@ -863,19 +886,41 @@ if sys.version_info[:2] < (3, 3):
_add_doc(reraise, """Reraise an exception.""")
if sys.version_info[0:2] < (3, 4):
+ # This does exactly the same what the :func:`py3:functools.update_wrapper`
+ # function does on Python versions after 3.2. It sets the ``__wrapped__``
+ # attribute on ``wrapper`` object and it doesn't raise an error if any of
+ # the attributes mentioned in ``assigned`` and ``updated`` are missing on
+ # ``wrapped`` object.
+ def _update_wrapper(
+ wrapper,
+ wrapped,
+ assigned=functools.WRAPPER_ASSIGNMENTS,
+ updated=functools.WRAPPER_UPDATES,
+ ):
+ for attr in assigned:
+ try:
+ value = getattr(wrapped, attr)
+ except AttributeError:
+ continue
+ else:
+ setattr(wrapper, attr, value)
+ for attr in updated:
+ getattr(wrapper, attr).update(getattr(wrapped, attr, {}))
+ wrapper.__wrapped__ = wrapped
+ return wrapper
+
+ _update_wrapper.__doc__ = functools.update_wrapper.__doc__
def wraps(
wrapped,
assigned=functools.WRAPPER_ASSIGNMENTS,
updated=functools.WRAPPER_UPDATES,
):
- def wrapper(f):
- f = functools.wraps(wrapped, assigned, updated)(f)
- f.__wrapped__ = wrapped
- return f
-
- return wrapper
+ return functools.partial(
+ _update_wrapper, wrapped=wrapped, assigned=assigned, updated=updated
+ )
+ wraps.__doc__ = functools.wraps.__doc__
else:
wraps = functools.wraps
@@ -888,7 +933,15 @@ def with_metaclass(meta, *bases):
# the actual metaclass.
class metaclass(type):
def __new__(cls, name, this_bases, d):
- return meta(name, bases, d)
+ if sys.version_info[:2] >= (3, 7):
+ # This version introduced PEP 560 that requires a bit
+ # of extra care (we mimic what is done by __build_class__).
+ resolved_bases = types.resolve_bases(bases)
+ if resolved_bases is not bases:
+ d["__orig_bases__"] = bases
+ else:
+ resolved_bases = bases
+ return meta(name, resolved_bases, d)
@classmethod
def __prepare__(cls, name, this_bases):
@@ -928,12 +981,11 @@ def ensure_binary(s, encoding="utf-8", errors="strict"):
- `str` -> encoded to `bytes`
- `bytes` -> `bytes`
"""
+ if isinstance(s, binary_type):
+ return s
if isinstance(s, text_type):
return s.encode(encoding, errors)
- elif isinstance(s, binary_type):
- return s
- else:
- raise TypeError("not expecting type '%s'" % type(s))
+ raise TypeError("not expecting type '%s'" % type(s))
def ensure_str(s, encoding="utf-8", errors="strict"):
@@ -947,12 +999,15 @@ def ensure_str(s, encoding="utf-8", errors="strict"):
- `str` -> `str`
- `bytes` -> decoded to `str`
"""
- if not isinstance(s, (text_type, binary_type)):
- raise TypeError("not expecting type '%s'" % type(s))
+ # Optimization: Fast return for the common case.
+ if type(s) is str:
+ return s
if PY2 and isinstance(s, text_type):
- s = s.encode(encoding, errors)
+ return s.encode(encoding, errors)
elif PY3 and isinstance(s, binary_type):
- s = s.decode(encoding, errors)
+ return s.decode(encoding, errors)
+ elif not isinstance(s, (text_type, binary_type)):
+ raise TypeError("not expecting type '%s'" % type(s))
return s
@@ -977,7 +1032,7 @@ def ensure_text(s, encoding="utf-8", errors="strict"):
def python_2_unicode_compatible(klass):
"""
- A decorator that defines __unicode__ and __str__ methods under Python 2.
+ A class decorator that defines __unicode__ and __str__ methods under Python 2.
Under Python 3 it does nothing.
To support Python 2 and 3 with a single code base, define a __str__ method
diff --git a/third_party/python/urllib3/urllib3/packages/ssl_match_hostname/__init__.py b/third_party/python/urllib3/urllib3/packages/ssl_match_hostname/__init__.py
deleted file mode 100644
index 6b12fd90aa..0000000000
--- a/third_party/python/urllib3/urllib3/packages/ssl_match_hostname/__init__.py
+++ /dev/null
@@ -1,22 +0,0 @@
-import sys
-
-try:
- # Our match_hostname function is the same as 3.5's, so we only want to
- # import the match_hostname function if it's at least that good.
- if sys.version_info < (3, 5):
- raise ImportError("Fallback to vendored code")
-
- from ssl import CertificateError, match_hostname
-except ImportError:
- try:
- # Backport of the function from a pypi module
- from backports.ssl_match_hostname import ( # type: ignore
- CertificateError,
- match_hostname,
- )
- except ImportError:
- # Our vendored copy
- from ._implementation import CertificateError, match_hostname # type: ignore
-
-# Not needed, but documenting what we provide.
-__all__ = ("CertificateError", "match_hostname")
diff --git a/third_party/python/urllib3/urllib3/poolmanager.py b/third_party/python/urllib3/urllib3/poolmanager.py
index 3a31a285bf..14b10daf3a 100644
--- a/third_party/python/urllib3/urllib3/poolmanager.py
+++ b/third_party/python/urllib3/urllib3/poolmanager.py
@@ -34,6 +34,7 @@ SSL_KEYWORDS = (
"ca_cert_dir",
"ssl_context",
"key_password",
+ "server_hostname",
)
# All known keyword arguments that could be provided to the pool manager, its
@@ -170,7 +171,7 @@ class PoolManager(RequestMethods):
def __init__(self, num_pools=10, headers=None, **connection_pool_kw):
RequestMethods.__init__(self, headers)
self.connection_pool_kw = connection_pool_kw
- self.pools = RecentlyUsedContainer(num_pools, dispose_func=lambda p: p.close())
+ self.pools = RecentlyUsedContainer(num_pools)
# Locally set the pool classes and keys so other PoolManagers can
# override them.
diff --git a/third_party/python/urllib3/urllib3/request.py b/third_party/python/urllib3/urllib3/request.py
index 398386a5b9..3b4cf99922 100644
--- a/third_party/python/urllib3/urllib3/request.py
+++ b/third_party/python/urllib3/urllib3/request.py
@@ -1,6 +1,9 @@
from __future__ import absolute_import
+import sys
+
from .filepost import encode_multipart_formdata
+from .packages import six
from .packages.six.moves.urllib.parse import urlencode
__all__ = ["RequestMethods"]
@@ -168,3 +171,21 @@ class RequestMethods(object):
extra_kw.update(urlopen_kw)
return self.urlopen(method, url, **extra_kw)
+
+
+if not six.PY2:
+
+ class RequestModule(sys.modules[__name__].__class__):
+ def __call__(self, *args, **kwargs):
+ """
+ If user tries to call this module directly urllib3 v2.x style raise an error to the user
+ suggesting they may need urllib3 v2
+ """
+ raise TypeError(
+ "'module' object is not callable\n"
+ "urllib3.request() method is not supported in this release, "
+ "upgrade to urllib3 v2 to use it\n"
+ "see https://urllib3.readthedocs.io/en/stable/v2-migration-guide.html"
+ )
+
+ sys.modules[__name__].__class__ = RequestModule
diff --git a/third_party/python/urllib3/urllib3/response.py b/third_party/python/urllib3/urllib3/response.py
index 38693f4fc6..0bd13d40b8 100644
--- a/third_party/python/urllib3/urllib3/response.py
+++ b/third_party/python/urllib3/urllib3/response.py
@@ -2,16 +2,22 @@ from __future__ import absolute_import
import io
import logging
+import sys
+import warnings
import zlib
from contextlib import contextmanager
from socket import error as SocketError
from socket import timeout as SocketTimeout
try:
- import brotli
+ try:
+ import brotlicffi as brotli
+ except ImportError:
+ import brotli
except ImportError:
brotli = None
+from . import util
from ._collections import HTTPHeaderDict
from .connection import BaseSSLError, HTTPException
from .exceptions import (
@@ -478,6 +484,54 @@ class HTTPResponse(io.IOBase):
if self._original_response and self._original_response.isclosed():
self.release_conn()
+ def _fp_read(self, amt):
+ """
+ Read a response with the thought that reading the number of bytes
+ larger than can fit in a 32-bit int at a time via SSL in some
+ known cases leads to an overflow error that has to be prevented
+ if `amt` or `self.length_remaining` indicate that a problem may
+ happen.
+
+ The known cases:
+ * 3.8 <= CPython < 3.9.7 because of a bug
+ https://github.com/urllib3/urllib3/issues/2513#issuecomment-1152559900.
+ * urllib3 injected with pyOpenSSL-backed SSL-support.
+ * CPython < 3.10 only when `amt` does not fit 32-bit int.
+ """
+ assert self._fp
+ c_int_max = 2 ** 31 - 1
+ if (
+ (
+ (amt and amt > c_int_max)
+ or (self.length_remaining and self.length_remaining > c_int_max)
+ )
+ and not util.IS_SECURETRANSPORT
+ and (util.IS_PYOPENSSL or sys.version_info < (3, 10))
+ ):
+ buffer = io.BytesIO()
+ # Besides `max_chunk_amt` being a maximum chunk size, it
+ # affects memory overhead of reading a response by this
+ # method in CPython.
+ # `c_int_max` equal to 2 GiB - 1 byte is the actual maximum
+ # chunk size that does not lead to an overflow error, but
+ # 256 MiB is a compromise.
+ max_chunk_amt = 2 ** 28
+ while amt is None or amt != 0:
+ if amt is not None:
+ chunk_amt = min(amt, max_chunk_amt)
+ amt -= chunk_amt
+ else:
+ chunk_amt = max_chunk_amt
+ data = self._fp.read(chunk_amt)
+ if not data:
+ break
+ buffer.write(data)
+ del data # to reduce peak memory usage by `max_chunk_amt`.
+ return buffer.getvalue()
+ else:
+ # StringIO doesn't like amt=None
+ return self._fp.read(amt) if amt is not None else self._fp.read()
+
def read(self, amt=None, decode_content=None, cache_content=False):
"""
Similar to :meth:`http.client.HTTPResponse.read`, but with two additional
@@ -510,13 +564,11 @@ class HTTPResponse(io.IOBase):
fp_closed = getattr(self._fp, "closed", False)
with self._error_catcher():
+ data = self._fp_read(amt) if not fp_closed else b""
if amt is None:
- # cStringIO doesn't like amt=None
- data = self._fp.read() if not fp_closed else b""
flush_decoder = True
else:
cache_content = False
- data = self._fp.read(amt) if not fp_closed else b""
if (
amt != 0 and not data
): # Platform-specific: Buggy versions of Python.
@@ -612,9 +664,21 @@ class HTTPResponse(io.IOBase):
# Backwards-compatibility methods for http.client.HTTPResponse
def getheaders(self):
+ warnings.warn(
+ "HTTPResponse.getheaders() is deprecated and will be removed "
+ "in urllib3 v2.1.0. Instead access HTTPResponse.headers directly.",
+ category=DeprecationWarning,
+ stacklevel=2,
+ )
return self.headers
def getheader(self, name, default=None):
+ warnings.warn(
+ "HTTPResponse.getheader() is deprecated and will be removed "
+ "in urllib3 v2.1.0. Instead use HTTPResponse.headers.get(name, default).",
+ category=DeprecationWarning,
+ stacklevel=2,
+ )
return self.headers.get(name, default)
# Backwards compatibility for http.cookiejar
diff --git a/third_party/python/urllib3/urllib3/util/connection.py b/third_party/python/urllib3/urllib3/util/connection.py
index cd57455748..6af1138f26 100644
--- a/third_party/python/urllib3/urllib3/util/connection.py
+++ b/third_party/python/urllib3/urllib3/util/connection.py
@@ -2,9 +2,8 @@ from __future__ import absolute_import
import socket
-from urllib3.exceptions import LocationParseError
-
from ..contrib import _appengine_environ
+from ..exceptions import LocationParseError
from ..packages import six
from .wait import NoWayToWaitForSocketError, wait_for_read
@@ -118,7 +117,7 @@ def allowed_gai_family():
def _has_ipv6(host):
- """ Returns True if the system can bind an IPv6 address. """
+ """Returns True if the system can bind an IPv6 address."""
sock = None
has_ipv6 = False
diff --git a/third_party/python/urllib3/urllib3/util/proxy.py b/third_party/python/urllib3/urllib3/util/proxy.py
index 34f884d5b3..2199cc7b7f 100644
--- a/third_party/python/urllib3/urllib3/util/proxy.py
+++ b/third_party/python/urllib3/urllib3/util/proxy.py
@@ -45,6 +45,7 @@ def create_proxy_ssl_context(
ssl_version=resolve_ssl_version(ssl_version),
cert_reqs=resolve_cert_reqs(cert_reqs),
)
+
if (
not ca_certs
and not ca_cert_dir
diff --git a/third_party/python/urllib3/urllib3/util/request.py b/third_party/python/urllib3/urllib3/util/request.py
index 25103383ec..b574b081e9 100644
--- a/third_party/python/urllib3/urllib3/util/request.py
+++ b/third_party/python/urllib3/urllib3/util/request.py
@@ -14,7 +14,10 @@ SKIPPABLE_HEADERS = frozenset(["accept-encoding", "host", "user-agent"])
ACCEPT_ENCODING = "gzip,deflate"
try:
- import brotli as _unused_module_brotli # noqa: F401
+ try:
+ import brotlicffi as _unused_module_brotli # noqa: F401
+ except ImportError:
+ import brotli as _unused_module_brotli # noqa: F401
except ImportError:
pass
else:
diff --git a/third_party/python/urllib3/urllib3/util/retry.py b/third_party/python/urllib3/urllib3/util/retry.py
index ee51f922f8..60ef6c4f3f 100644
--- a/third_party/python/urllib3/urllib3/util/retry.py
+++ b/third_party/python/urllib3/urllib3/util/retry.py
@@ -37,7 +37,7 @@ class _RetryMeta(type):
def DEFAULT_METHOD_WHITELIST(cls):
warnings.warn(
"Using 'Retry.DEFAULT_METHOD_WHITELIST' is deprecated and "
- "will be removed in v2.0. Use 'Retry.DEFAULT_METHODS_ALLOWED' instead",
+ "will be removed in v2.0. Use 'Retry.DEFAULT_ALLOWED_METHODS' instead",
DeprecationWarning,
)
return cls.DEFAULT_ALLOWED_METHODS
@@ -69,6 +69,24 @@ class _RetryMeta(type):
)
cls.DEFAULT_REMOVE_HEADERS_ON_REDIRECT = value
+ @property
+ def BACKOFF_MAX(cls):
+ warnings.warn(
+ "Using 'Retry.BACKOFF_MAX' is deprecated and "
+ "will be removed in v2.0. Use 'Retry.DEFAULT_BACKOFF_MAX' instead",
+ DeprecationWarning,
+ )
+ return cls.DEFAULT_BACKOFF_MAX
+
+ @BACKOFF_MAX.setter
+ def BACKOFF_MAX(cls, value):
+ warnings.warn(
+ "Using 'Retry.BACKOFF_MAX' is deprecated and "
+ "will be removed in v2.0. Use 'Retry.DEFAULT_BACKOFF_MAX' instead",
+ DeprecationWarning,
+ )
+ cls.DEFAULT_BACKOFF_MAX = value
+
@six.add_metaclass(_RetryMeta)
class Retry(object):
@@ -181,7 +199,7 @@ class Retry(object):
seconds. If the backoff_factor is 0.1, then :func:`.sleep` will sleep
for [0.0s, 0.2s, 0.4s, ...] between retries. It will never be longer
- than :attr:`Retry.BACKOFF_MAX`.
+ than :attr:`Retry.DEFAULT_BACKOFF_MAX`.
By default, backoff is disabled (set to 0).
@@ -217,10 +235,10 @@ class Retry(object):
RETRY_AFTER_STATUS_CODES = frozenset([413, 429, 503])
#: Default headers to be used for ``remove_headers_on_redirect``
- DEFAULT_REMOVE_HEADERS_ON_REDIRECT = frozenset(["Authorization"])
+ DEFAULT_REMOVE_HEADERS_ON_REDIRECT = frozenset(["Cookie", "Authorization"])
#: Maximum backoff time.
- BACKOFF_MAX = 120
+ DEFAULT_BACKOFF_MAX = 120
def __init__(
self,
@@ -253,6 +271,7 @@ class Retry(object):
"Using 'method_whitelist' with Retry is deprecated and "
"will be removed in v2.0. Use 'allowed_methods' instead",
DeprecationWarning,
+ stacklevel=2,
)
allowed_methods = method_whitelist
if allowed_methods is _Default:
@@ -320,7 +339,7 @@ class Retry(object):
@classmethod
def from_int(cls, retries, redirect=True, default=None):
- """ Backwards-compatibility for the old retries format."""
+ """Backwards-compatibility for the old retries format."""
if retries is None:
retries = default if default is not None else cls.DEFAULT
@@ -347,7 +366,7 @@ class Retry(object):
return 0
backoff_value = self.backoff_factor * (2 ** (consecutive_errors_len - 1))
- return min(self.BACKOFF_MAX, backoff_value)
+ return min(self.DEFAULT_BACKOFF_MAX, backoff_value)
def parse_retry_after(self, retry_after):
# Whitespace: https://tools.ietf.org/html/rfc7230#section-3.2.4
@@ -373,9 +392,9 @@ class Retry(object):
return seconds
def get_retry_after(self, response):
- """ Get the value of Retry-After in seconds. """
+ """Get the value of Retry-After in seconds."""
- retry_after = response.getheader("Retry-After")
+ retry_after = response.headers.get("Retry-After")
if retry_after is None:
return None
@@ -467,7 +486,7 @@ class Retry(object):
)
def is_exhausted(self):
- """ Are we out of retries? """
+ """Are we out of retries?"""
retry_counts = (
self.total,
self.connect,
diff --git a/third_party/python/urllib3/urllib3/util/ssl_.py b/third_party/python/urllib3/urllib3/util/ssl_.py
index 1cb5e7cdc1..8f867812a5 100644
--- a/third_party/python/urllib3/urllib3/util/ssl_.py
+++ b/third_party/python/urllib3/urllib3/util/ssl_.py
@@ -44,13 +44,21 @@ _const_compare_digest = getattr(hmac, "compare_digest", _const_compare_digest_ba
try: # Test for SSL features
import ssl
- from ssl import HAS_SNI # Has SNI?
from ssl import CERT_REQUIRED, wrap_socket
+except ImportError:
+ pass
+
+try:
+ from ssl import HAS_SNI # Has SNI?
+except ImportError:
+ pass
+try:
from .ssltransport import SSLTransport
except ImportError:
pass
+
try: # Platform-specific: Python 3.6
from ssl import PROTOCOL_TLS
@@ -63,6 +71,11 @@ except ImportError:
except ImportError:
PROTOCOL_SSLv23 = PROTOCOL_TLS = 2
+try:
+ from ssl import PROTOCOL_TLS_CLIENT
+except ImportError:
+ PROTOCOL_TLS_CLIENT = PROTOCOL_TLS
+
try:
from ssl import OP_NO_COMPRESSION, OP_NO_SSLv2, OP_NO_SSLv3
@@ -151,7 +164,7 @@ except ImportError:
"urllib3 from configuring SSL appropriately and may cause "
"certain SSL connections to fail. You can upgrade to a newer "
"version of Python to solve this. For more information, see "
- "https://urllib3.readthedocs.io/en/latest/advanced-usage.html"
+ "https://urllib3.readthedocs.io/en/1.26.x/advanced-usage.html"
"#ssl-warnings",
InsecurePlatformWarning,
)
@@ -270,7 +283,11 @@ def create_urllib3_context(
Constructed SSLContext object with specified options
:rtype: SSLContext
"""
- context = SSLContext(ssl_version or PROTOCOL_TLS)
+ # PROTOCOL_TLS is deprecated in Python 3.10
+ if not ssl_version or ssl_version == PROTOCOL_TLS:
+ ssl_version = PROTOCOL_TLS_CLIENT
+
+ context = SSLContext(ssl_version)
context.set_ciphers(ciphers or DEFAULT_CIPHERS)
@@ -305,13 +322,25 @@ def create_urllib3_context(
) is not None:
context.post_handshake_auth = True
- context.verify_mode = cert_reqs
- if (
- getattr(context, "check_hostname", None) is not None
- ): # Platform-specific: Python 3.2
- # We do our own verification, including fingerprints and alternative
- # hostnames. So disable it here
- context.check_hostname = False
+ def disable_check_hostname():
+ if (
+ getattr(context, "check_hostname", None) is not None
+ ): # Platform-specific: Python 3.2
+ # We do our own verification, including fingerprints and alternative
+ # hostnames. So disable it here
+ context.check_hostname = False
+
+ # The order of the below lines setting verify_mode and check_hostname
+ # matter due to safe-guards SSLContext has to prevent an SSLContext with
+ # check_hostname=True, verify_mode=NONE/OPTIONAL. This is made even more
+ # complex because we don't know whether PROTOCOL_TLS_CLIENT will be used
+ # or not so we don't know the initial state of the freshly created SSLContext.
+ if cert_reqs == ssl.CERT_REQUIRED:
+ context.verify_mode = cert_reqs
+ disable_check_hostname()
+ else:
+ disable_check_hostname()
+ context.verify_mode = cert_reqs
# Enable logging of TLS session keys via defacto standard environment variable
# 'SSLKEYLOGFILE', if the feature is available (Python 3.8+). Skip empty values.
@@ -393,7 +422,7 @@ def ssl_wrap_socket(
try:
if hasattr(context, "set_alpn_protocols"):
context.set_alpn_protocols(ALPN_PROTOCOLS)
- except NotImplementedError:
+ except NotImplementedError: # Defensive: in CI, we always have set_alpn_protocols
pass
# If we detect server_hostname is an IP address then the SNI
@@ -411,7 +440,7 @@ def ssl_wrap_socket(
"This may cause the server to present an incorrect TLS "
"certificate, which can cause validation failures. You can upgrade to "
"a newer version of Python to solve this. For more information, see "
- "https://urllib3.readthedocs.io/en/latest/advanced-usage.html"
+ "https://urllib3.readthedocs.io/en/1.26.x/advanced-usage.html"
"#ssl-warnings",
SNIMissingWarning,
)
diff --git a/third_party/python/urllib3/urllib3/packages/ssl_match_hostname/_implementation.py b/third_party/python/urllib3/urllib3/util/ssl_match_hostname.py
index 689208d3c6..1dd950c489 100644
--- a/third_party/python/urllib3/urllib3/packages/ssl_match_hostname/_implementation.py
+++ b/third_party/python/urllib3/urllib3/util/ssl_match_hostname.py
@@ -9,7 +9,7 @@ import sys
# ipaddress has been backported to 2.6+ in pypi. If it is installed on the
# system, use it to handle IPAddress ServerAltnames (this was added in
# python-3.5) otherwise only do DNS matching. This allows
-# backports.ssl_match_hostname to continue to be used in Python 2.7.
+# util.ssl_match_hostname to continue to be used in Python 2.7.
try:
import ipaddress
except ImportError:
@@ -78,7 +78,8 @@ def _dnsname_match(dn, hostname, max_wildcards=1):
def _to_unicode(obj):
if isinstance(obj, str) and sys.version_info < (3,):
- obj = unicode(obj, encoding="ascii", errors="strict")
+ # ignored flake8 # F821 to support python 2.7 function
+ obj = unicode(obj, encoding="ascii", errors="strict") # noqa: F821
return obj
@@ -111,11 +112,9 @@ def match_hostname(cert, hostname):
try:
# Divergence from upstream: ipaddress can't handle byte str
host_ip = ipaddress.ip_address(_to_unicode(hostname))
- except ValueError:
- # Not an IP address (common case)
- host_ip = None
- except UnicodeError:
- # Divergence from upstream: Have to deal with ipaddress not taking
+ except (UnicodeError, ValueError):
+ # ValueError: Not an IP address (common case)
+ # UnicodeError: Divergence from upstream: Have to deal with ipaddress not taking
# byte strings. addresses should be all ascii, so we consider it not
# an ipaddress in this case
host_ip = None
@@ -123,7 +122,7 @@ def match_hostname(cert, hostname):
# Divergence from upstream: Make ipaddress library optional
if ipaddress is None:
host_ip = None
- else:
+ else: # Defensive
raise
dnsnames = []
san = cert.get("subjectAltName", ())
diff --git a/third_party/python/urllib3/urllib3/util/ssltransport.py b/third_party/python/urllib3/urllib3/util/ssltransport.py
index 1e41354f5d..4a7105d179 100644
--- a/third_party/python/urllib3/urllib3/util/ssltransport.py
+++ b/third_party/python/urllib3/urllib3/util/ssltransport.py
@@ -2,8 +2,8 @@ import io
import socket
import ssl
-from urllib3.exceptions import ProxySchemeUnsupported
-from urllib3.packages import six
+from ..exceptions import ProxySchemeUnsupported
+from ..packages import six
SSL_BLOCKSIZE = 16384
@@ -193,7 +193,7 @@ class SSLTransport:
raise
def _ssl_io_loop(self, func, *args):
- """ Performs an I/O loop between incoming/outgoing and the socket."""
+ """Performs an I/O loop between incoming/outgoing and the socket."""
should_loop = True
ret = None
diff --git a/third_party/python/urllib3/urllib3/util/timeout.py b/third_party/python/urllib3/urllib3/util/timeout.py
index ff69593b05..78e18a6272 100644
--- a/third_party/python/urllib3/urllib3/util/timeout.py
+++ b/third_party/python/urllib3/urllib3/util/timeout.py
@@ -2,9 +2,8 @@ from __future__ import absolute_import
import time
-# The default socket timeout, used by httplib to indicate that no timeout was
-# specified by the user
-from socket import _GLOBAL_DEFAULT_TIMEOUT
+# The default socket timeout, used by httplib to indicate that no timeout was; specified by the user
+from socket import _GLOBAL_DEFAULT_TIMEOUT, getdefaulttimeout
from ..exceptions import TimeoutStateError
@@ -117,6 +116,10 @@ class Timeout(object):
__str__ = __repr__
@classmethod
+ def resolve_default_timeout(cls, timeout):
+ return getdefaulttimeout() if timeout is cls.DEFAULT_TIMEOUT else timeout
+
+ @classmethod
def _validate_timeout(cls, value, name):
"""Check that a timeout attribute is valid.
diff --git a/third_party/python/urllib3/urllib3/util/url.py b/third_party/python/urllib3/urllib3/util/url.py
index 6ff238fe3c..e5682d3be4 100644
--- a/third_party/python/urllib3/urllib3/util/url.py
+++ b/third_party/python/urllib3/urllib3/util/url.py
@@ -50,7 +50,7 @@ _variations = [
"(?:(?:%(hex)s:){0,6}%(hex)s)?::",
]
-UNRESERVED_PAT = r"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789._!\-~"
+UNRESERVED_PAT = r"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789._\-~"
IPV6_PAT = "(?:" + "|".join([x % _subs for x in _variations]) + ")"
ZONE_ID_PAT = "(?:%25|%)(?:[" + UNRESERVED_PAT + "]|%[a-fA-F0-9]{2})+"
IPV6_ADDRZ_PAT = r"\[" + IPV6_PAT + r"(?:" + ZONE_ID_PAT + r")?\]"
@@ -63,12 +63,12 @@ IPV6_ADDRZ_RE = re.compile("^" + IPV6_ADDRZ_PAT + "$")
BRACELESS_IPV6_ADDRZ_RE = re.compile("^" + IPV6_ADDRZ_PAT[2:-2] + "$")
ZONE_ID_RE = re.compile("(" + ZONE_ID_PAT + r")\]$")
-SUBAUTHORITY_PAT = (u"^(?:(.*)@)?(%s|%s|%s)(?::([0-9]{0,5}))?$") % (
+_HOST_PORT_PAT = ("^(%s|%s|%s)(?::0*?(|0|[1-9][0-9]{0,4}))?$") % (
REG_NAME_PAT,
IPV4_PAT,
IPV6_ADDRZ_PAT,
)
-SUBAUTHORITY_RE = re.compile(SUBAUTHORITY_PAT, re.UNICODE | re.DOTALL)
+_HOST_PORT_RE = re.compile(_HOST_PORT_PAT, re.UNICODE | re.DOTALL)
UNRESERVED_CHARS = set(
"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789._-~"
@@ -279,6 +279,9 @@ def _normalize_host(host, scheme):
if scheme in NORMALIZABLE_SCHEMES:
is_ipv6 = IPV6_ADDRZ_RE.match(host)
if is_ipv6:
+ # IPv6 hosts of the form 'a::b%zone' are encoded in a URL as
+ # such per RFC 6874: 'a::b%25zone'. Unquote the ZoneID
+ # separator as necessary to return a valid RFC 4007 scoped IP.
match = ZONE_ID_RE.search(host)
if match:
start, end = match.span(1)
@@ -300,7 +303,7 @@ def _normalize_host(host, scheme):
def _idna_encode(name):
- if name and any([ord(x) > 128 for x in name]):
+ if name and any(ord(x) >= 128 for x in name):
try:
import idna
except ImportError:
@@ -331,7 +334,7 @@ def parse_url(url):
"""
Given a url, return a parsed :class:`.Url` namedtuple. Best-effort is
performed to parse incomplete urls. Fields not provided will be None.
- This parser is RFC 3986 compliant.
+ This parser is RFC 3986 and RFC 6874 compliant.
The parser logic and helper functions are based heavily on
work done in the ``rfc3986`` module.
@@ -365,7 +368,9 @@ def parse_url(url):
scheme = scheme.lower()
if authority:
- auth, host, port = SUBAUTHORITY_RE.match(authority).groups()
+ auth, _, host_port = authority.rpartition("@")
+ auth = auth or None
+ host, port = _HOST_PORT_RE.match(host_port).groups()
if auth and normalize_uri:
auth = _encode_invalid_chars(auth, USERINFO_CHARS)
if port == "":
diff --git a/third_party/python/urllib3/urllib3/util/wait.py b/third_party/python/urllib3/urllib3/util/wait.py
index c280646c7b..21b4590b3d 100644
--- a/third_party/python/urllib3/urllib3/util/wait.py
+++ b/third_party/python/urllib3/urllib3/util/wait.py
@@ -42,7 +42,6 @@ if sys.version_info >= (3, 5):
def _retry_on_intr(fn, timeout):
return fn(timeout)
-
else:
# Old and broken Pythons.
def _retry_on_intr(fn, timeout):