Skip to content

Commit 3f39bbd

Browse files
GeekMasher and github-actions[bot]
authored and committed
[chore]: Update vendored dependencies
1 parent 8be7e9f commit 3f39bbd

File tree

9 files changed

+202
-72
lines changed

9 files changed

+202
-72
lines changed

vendor/bin/normalizer

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
#!/home/geekmasher/.local/share/virtualenvs/brew-dependency-submission-action-Gz_oAmoD/bin/python
1+
#!/home/runner/.local/share/virtualenvs/brew-dependency-submission-action-XUqNX2Tu/bin/python
22
# -*- coding: utf-8 -*-
33
import re
44
import sys

vendor/semantic_version/__init__.py

Lines changed: 9 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -7,4 +7,12 @@
77

88

99
__author__ = "Raphaël Barrois <raphael.barrois+semver@polytechnique.org>"
10-
__version__ = "2.10.0"
10+
try:
11+
# Python 3.8+
12+
from importlib.metadata import version
13+
14+
__version__ = version("semantic_version")
15+
except ImportError:
16+
import pkg_resources
17+
18+
__version__ = pkg_resources.get_distribution("semantic_version").version

vendor/urllib3/_version.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -17,5 +17,5 @@
1717
__version_tuple__: VERSION_TUPLE
1818
version_tuple: VERSION_TUPLE
1919

20-
__version__ = version = '2.4.0'
21-
__version_tuple__ = version_tuple = (2, 4, 0)
20+
__version__ = version = '2.5.0'
21+
__version_tuple__ = version_tuple = (2, 5, 0)

vendor/urllib3/connection.py

Lines changed: 87 additions & 38 deletions
Original file line numberDiff line numberDiff line change
@@ -74,7 +74,7 @@ class BaseSSLError(BaseException): # type: ignore[no-redef]
7474

7575
# When it comes time to update this value as a part of regular maintenance
7676
# (ie test_recent_date is failing) update it to ~6 months before the current date.
77-
RECENT_DATE = datetime.date(2023, 6, 1)
77+
RECENT_DATE = datetime.date(2025, 1, 1)
7878

7979
_CONTAINS_CONTROL_CHAR_RE = re.compile(r"[^-!#$%&'*+.^_`|~0-9a-zA-Z]")
8080

@@ -232,45 +232,94 @@ def set_tunnel(
232232
super().set_tunnel(host, port=port, headers=headers)
233233
self._tunnel_scheme = scheme
234234

235-
if sys.version_info < (3, 11, 4):
236-
237-
def _tunnel(self) -> None:
238-
_MAXLINE = http.client._MAXLINE # type: ignore[attr-defined]
239-
connect = b"CONNECT %s:%d HTTP/1.0\r\n" % ( # type: ignore[str-format]
240-
self._tunnel_host.encode("ascii"), # type: ignore[union-attr]
241-
self._tunnel_port,
242-
)
243-
headers = [connect]
244-
for header, value in self._tunnel_headers.items(): # type: ignore[attr-defined]
245-
headers.append(f"{header}: {value}\r\n".encode("latin-1"))
246-
headers.append(b"\r\n")
247-
# Making a single send() call instead of one per line encourages
248-
# the host OS to use a more optimal packet size instead of
249-
# potentially emitting a series of small packets.
250-
self.send(b"".join(headers))
251-
del headers
252-
253-
response = self.response_class(self.sock, method=self._method) # type: ignore[attr-defined]
254-
try:
255-
(version, code, message) = response._read_status() # type: ignore[attr-defined]
256-
257-
if code != http.HTTPStatus.OK:
258-
self.close()
259-
raise OSError(f"Tunnel connection failed: {code} {message.strip()}")
260-
while True:
261-
line = response.fp.readline(_MAXLINE + 1)
262-
if len(line) > _MAXLINE:
263-
raise http.client.LineTooLong("header line")
264-
if not line:
265-
# for sites which EOF without sending a trailer
266-
break
267-
if line in (b"\r\n", b"\n", b""):
268-
break
235+
if sys.version_info < (3, 11, 9) or ((3, 12) <= sys.version_info < (3, 12, 3)):
236+
# Taken from python/cpython#100986 which was backported in 3.11.9 and 3.12.3.
237+
# When using connection_from_host, host will come without brackets.
238+
def _wrap_ipv6(self, ip: bytes) -> bytes:
239+
if b":" in ip and ip[0] != b"["[0]:
240+
return b"[" + ip + b"]"
241+
return ip
242+
243+
if sys.version_info < (3, 11, 9):
244+
# `_tunnel` copied from 3.11.13 backporting
245+
# https://github.com/python/cpython/commit/0d4026432591d43185568dd31cef6a034c4b9261
246+
# and https://github.com/python/cpython/commit/6fbc61070fda2ffb8889e77e3b24bca4249ab4d1
247+
def _tunnel(self) -> None:
248+
_MAXLINE = http.client._MAXLINE # type: ignore[attr-defined]
249+
connect = b"CONNECT %s:%d HTTP/1.0\r\n" % ( # type: ignore[str-format]
250+
self._wrap_ipv6(self._tunnel_host.encode("ascii")), # type: ignore[union-attr]
251+
self._tunnel_port,
252+
)
253+
headers = [connect]
254+
for header, value in self._tunnel_headers.items(): # type: ignore[attr-defined]
255+
headers.append(f"{header}: {value}\r\n".encode("latin-1"))
256+
headers.append(b"\r\n")
257+
# Making a single send() call instead of one per line encourages
258+
# the host OS to use a more optimal packet size instead of
259+
# potentially emitting a series of small packets.
260+
self.send(b"".join(headers))
261+
del headers
262+
263+
response = self.response_class(self.sock, method=self._method) # type: ignore[attr-defined]
264+
try:
265+
(version, code, message) = response._read_status() # type: ignore[attr-defined]
266+
267+
if code != http.HTTPStatus.OK:
268+
self.close()
269+
raise OSError(
270+
f"Tunnel connection failed: {code} {message.strip()}"
271+
)
272+
while True:
273+
line = response.fp.readline(_MAXLINE + 1)
274+
if len(line) > _MAXLINE:
275+
raise http.client.LineTooLong("header line")
276+
if not line:
277+
# for sites which EOF without sending a trailer
278+
break
279+
if line in (b"\r\n", b"\n", b""):
280+
break
281+
282+
if self.debuglevel > 0:
283+
print("header:", line.decode())
284+
finally:
285+
response.close()
286+
287+
elif (3, 12) <= sys.version_info < (3, 12, 3):
288+
# `_tunnel` copied from 3.12.11 backporting
289+
# https://github.com/python/cpython/commit/23aef575c7629abcd4aaf028ebd226fb41a4b3c8
290+
def _tunnel(self) -> None: # noqa: F811
291+
connect = b"CONNECT %s:%d HTTP/1.1\r\n" % ( # type: ignore[str-format]
292+
self._wrap_ipv6(self._tunnel_host.encode("idna")), # type: ignore[union-attr]
293+
self._tunnel_port,
294+
)
295+
headers = [connect]
296+
for header, value in self._tunnel_headers.items(): # type: ignore[attr-defined]
297+
headers.append(f"{header}: {value}\r\n".encode("latin-1"))
298+
headers.append(b"\r\n")
299+
# Making a single send() call instead of one per line encourages
300+
# the host OS to use a more optimal packet size instead of
301+
# potentially emitting a series of small packets.
302+
self.send(b"".join(headers))
303+
del headers
304+
305+
response = self.response_class(self.sock, method=self._method) # type: ignore[attr-defined]
306+
try:
307+
(version, code, message) = response._read_status() # type: ignore[attr-defined]
308+
309+
self._raw_proxy_headers = http.client._read_headers(response.fp) # type: ignore[attr-defined]
269310

270311
if self.debuglevel > 0:
271-
print("header:", line.decode())
272-
finally:
273-
response.close()
312+
for header in self._raw_proxy_headers:
313+
print("header:", header.decode())
314+
315+
if code != http.HTTPStatus.OK:
316+
self.close()
317+
raise OSError(
318+
f"Tunnel connection failed: {code} {message.strip()}"
319+
)
320+
321+
finally:
322+
response.close()
274323

275324
def connect(self) -> None:
276325
self.sock = self._new_conn()

vendor/urllib3/contrib/emscripten/fetch.py

Lines changed: 20 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -573,6 +573,11 @@ def send_jspi_request(
573573
"method": request.method,
574574
"signal": js_abort_controller.signal,
575575
}
576+
# Node.js returns the whole response (unlike opaqueredirect in browsers),
577+
# so urllib3 can set `redirect: manual` to control redirects itself.
578+
# https://stackoverflow.com/a/78524615
579+
if _is_node_js():
580+
fetch_data["redirect"] = "manual"
576581
# Call JavaScript fetch (async api, returns a promise)
577582
fetcher_promise_js = js.fetch(request.url, _obj_from_dict(fetch_data))
578583
# Now suspend WebAssembly until we resolve that promise
@@ -693,6 +698,21 @@ def has_jspi() -> bool:
693698
return False
694699

695700

701+
def _is_node_js() -> bool:
702+
"""
703+
Check if we are in Node.js.
704+
705+
:return: True if we are in Node.js.
706+
:rtype: bool
707+
"""
708+
return (
709+
hasattr(js, "process")
710+
and hasattr(js.process, "release")
711+
# According to the Node.js documentation, the release name is always "node".
712+
and js.process.release.name == "node"
713+
)
714+
715+
696716
def streaming_ready() -> bool | None:
697717
if _fetcher:
698718
return _fetcher.streaming_ready

vendor/urllib3/poolmanager.py

Lines changed: 17 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -203,6 +203,22 @@ def __init__(
203203
**connection_pool_kw: typing.Any,
204204
) -> None:
205205
super().__init__(headers)
206+
if "retries" in connection_pool_kw:
207+
retries = connection_pool_kw["retries"]
208+
if not isinstance(retries, Retry):
209+
# When Retry is initialized, raise_on_redirect is based
210+
# on a redirect boolean value.
211+
# But requests made via a pool manager always set
212+
# redirect to False, and raise_on_redirect always ends
213+
# up being False consequently.
214+
# Here we fix the issue by setting raise_on_redirect to
215+
# a value needed by the pool manager without considering
216+
# the redirect boolean.
217+
raise_on_redirect = retries is not False
218+
retries = Retry.from_int(retries, redirect=False)
219+
retries.raise_on_redirect = raise_on_redirect
220+
connection_pool_kw = connection_pool_kw.copy()
221+
connection_pool_kw["retries"] = retries
206222
self.connection_pool_kw = connection_pool_kw
207223

208224
self.pools: RecentlyUsedContainer[PoolKey, HTTPConnectionPool]
@@ -456,7 +472,7 @@ def urlopen( # type: ignore[override]
456472
kw["body"] = None
457473
kw["headers"] = HTTPHeaderDict(kw["headers"])._prepare_for_method_change()
458474

459-
retries = kw.get("retries")
475+
retries = kw.get("retries", response.retries)
460476
if not isinstance(retries, Retry):
461477
retries = Retry.from_int(retries, redirect=redirect)
462478

vendor/urllib3/response.py

Lines changed: 53 additions & 24 deletions
Original file line numberDiff line numberDiff line change
@@ -26,23 +26,6 @@
2626
except ImportError:
2727
brotli = None
2828

29-
try:
30-
import zstandard as zstd
31-
except (AttributeError, ImportError, ValueError): # Defensive:
32-
HAS_ZSTD = False
33-
else:
34-
# The package 'zstandard' added the 'eof' property starting
35-
# in v0.18.0 which we require to ensure a complete and
36-
# valid zstd stream was fed into the ZstdDecoder.
37-
# See: https://github.com/urllib3/urllib3/pull/2624
38-
_zstd_version = tuple(
39-
map(int, re.search(r"^([0-9]+)\.([0-9]+)", zstd.__version__).groups()) # type: ignore[union-attr]
40-
)
41-
if _zstd_version < (0, 18): # Defensive:
42-
HAS_ZSTD = False
43-
else:
44-
HAS_ZSTD = True
45-
4629
from . import util
4730
from ._base_connection import _TYPE_BODY
4831
from ._collections import HTTPHeaderDict
@@ -163,27 +146,69 @@ def flush(self) -> bytes:
163146
return b""
164147

165148

166-
if HAS_ZSTD:
149+
try:
150+
# Python 3.14+
151+
from compression import zstd # type: ignore[import-not-found] # noqa: F401
152+
153+
HAS_ZSTD = True
167154

168155
class ZstdDecoder(ContentDecoder):
169156
def __init__(self) -> None:
170-
self._obj = zstd.ZstdDecompressor().decompressobj()
157+
self._obj = zstd.ZstdDecompressor()
171158

172159
def decompress(self, data: bytes) -> bytes:
173160
if not data:
174161
return b""
175162
data_parts = [self._obj.decompress(data)]
176163
while self._obj.eof and self._obj.unused_data:
177164
unused_data = self._obj.unused_data
178-
self._obj = zstd.ZstdDecompressor().decompressobj()
165+
self._obj = zstd.ZstdDecompressor()
179166
data_parts.append(self._obj.decompress(unused_data))
180167
return b"".join(data_parts)
181168

182169
def flush(self) -> bytes:
183-
ret = self._obj.flush() # note: this is a no-op
184170
if not self._obj.eof:
185171
raise DecodeError("Zstandard data is incomplete")
186-
return ret
172+
return b""
173+
174+
except ImportError:
175+
try:
176+
# Python 3.13 and earlier require the 'zstandard' module.
177+
import zstandard as zstd
178+
179+
# The package 'zstandard' added the 'eof' property starting
180+
# in v0.18.0 which we require to ensure a complete and
181+
# valid zstd stream was fed into the ZstdDecoder.
182+
# See: https://github.com/urllib3/urllib3/pull/2624
183+
_zstd_version = tuple(
184+
map(int, re.search(r"^([0-9]+)\.([0-9]+)", zstd.__version__).groups()) # type: ignore[union-attr]
185+
)
186+
if _zstd_version < (0, 18): # Defensive:
187+
raise ImportError("zstandard module doesn't have eof")
188+
except (AttributeError, ImportError, ValueError): # Defensive:
189+
HAS_ZSTD = False
190+
else:
191+
HAS_ZSTD = True
192+
193+
class ZstdDecoder(ContentDecoder): # type: ignore[no-redef]
194+
def __init__(self) -> None:
195+
self._obj = zstd.ZstdDecompressor().decompressobj()
196+
197+
def decompress(self, data: bytes) -> bytes:
198+
if not data:
199+
return b""
200+
data_parts = [self._obj.decompress(data)]
201+
while self._obj.eof and self._obj.unused_data:
202+
unused_data = self._obj.unused_data
203+
self._obj = zstd.ZstdDecompressor().decompressobj()
204+
data_parts.append(self._obj.decompress(unused_data))
205+
return b"".join(data_parts)
206+
207+
def flush(self) -> bytes:
208+
ret = self._obj.flush() # note: this is a no-op
209+
if not self._obj.eof:
210+
raise DecodeError("Zstandard data is incomplete")
211+
return ret # type: ignore[no-any-return]
187212

188213

189214
class MultiDecoder(ContentDecoder):
@@ -518,7 +543,7 @@ def readinto(self, b: bytearray) -> int:
518543
def getheaders(self) -> HTTPHeaderDict:
519544
warnings.warn(
520545
"HTTPResponse.getheaders() is deprecated and will be removed "
521-
"in urllib3 v2.1.0. Instead access HTTPResponse.headers directly.",
546+
"in urllib3 v2.6.0. Instead access HTTPResponse.headers directly.",
522547
category=DeprecationWarning,
523548
stacklevel=2,
524549
)
@@ -527,7 +552,7 @@ def getheaders(self) -> HTTPHeaderDict:
527552
def getheader(self, name: str, default: str | None = None) -> str | None:
528553
warnings.warn(
529554
"HTTPResponse.getheader() is deprecated and will be removed "
530-
"in urllib3 v2.1.0. Instead use HTTPResponse.headers.get(name, default).",
555+
"in urllib3 v2.6.0. Instead use HTTPResponse.headers.get(name, default).",
531556
category=DeprecationWarning,
532557
stacklevel=2,
533558
)
@@ -1075,6 +1100,10 @@ def readable(self) -> bool:
10751100
def shutdown(self) -> None:
10761101
if not self._sock_shutdown:
10771102
raise ValueError("Cannot shutdown socket as self._sock_shutdown is not set")
1103+
if self._connection is None:
1104+
raise RuntimeError(
1105+
"Cannot shutdown as connection has already been released to the pool"
1106+
)
10781107
self._sock_shutdown(socket.SHUT_RD)
10791108

10801109
def close(self) -> None:

vendor/urllib3/util/request.py

Lines changed: 12 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -28,12 +28,20 @@
2828
pass
2929
else:
3030
ACCEPT_ENCODING += ",br"
31+
3132
try:
32-
import zstandard as _unused_module_zstd # noqa: F401
33-
except ImportError:
34-
pass
35-
else:
33+
from compression import ( # type: ignore[import-not-found] # noqa: F401
34+
zstd as _unused_module_zstd,
35+
)
36+
3637
ACCEPT_ENCODING += ",zstd"
38+
except ImportError:
39+
try:
40+
import zstandard as _unused_module_zstd # noqa: F401
41+
42+
ACCEPT_ENCODING += ",zstd"
43+
except ImportError:
44+
pass
3745

3846

3947
class _TYPE_FAILEDTELL(Enum):

vendor/urllib3/util/ssl_.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -289,7 +289,7 @@ def create_urllib3_context(
289289
# keep the maximum version to be it's default value: 'TLSVersion.MAXIMUM_SUPPORTED'
290290
warnings.warn(
291291
"'ssl_version' option is deprecated and will be "
292-
"removed in urllib3 v2.1.0. Instead use 'ssl_minimum_version'",
292+
"removed in urllib3 v2.6.0. Instead use 'ssl_minimum_version'",
293293
category=DeprecationWarning,
294294
stacklevel=2,
295295
)

0 commit comments

Comments (0)