forked from pool/python312
Compare commits
86 Commits
SHA256
ca6722de84
f9e1cf1836
b96f7f884b
a91a0aca60
8dd75ac7e9
01d7c30105
3d0b1fd2f3
5ffcff295f
df350a3d04
62a8d14b2c
37c1d3d2e3
36a106a0a7
8b5d8bb101
1ee29c7d85
a5b17ad854
6441e5a86b
af89117d93
b179411cca
f1df581bc1
e728127a90
2410e499d4
730e031b5a
9b369ae708
d915e370e5
3f073ea41b
d2c62b9b77
1929c41f46
8d147e1486
3bf1e1a8e7
3a565bec26
ea7b8271b0
b814d70dca
0a23865f82
4db7913729
07eef01e76
bbb6498fe3
9bf13da52a
2cb6f30213
f894003382
fa963a9d40
1e0fc4ca6f
c558688a19
584c05bad9
b11adbdea3
e82a230b70
e7906b91e2
07ecf72506
b45169abf8
32717178fc
f7e695cbd6
0496c93f4b
30f651fd15
eacdd5e9b5
bae099bfd7
c062335ad2
4fcdd05e86
24c111965b
c4b3c6583b
d058a99b8a
9431cf257f
e85ec7c286
652065b794
a7439aaf5b
694498a6a8
8a08246ce9
b9104c7cad
d5a3615b78
82050fef68
094ec27e0f
f07b688f29
06a5cb31be
803cb95998
cd88adc808
118ac765b0
2f2e126886
8c2f054df4
957ff77855
2aeb619628
38ff7e3150
ec208c83f9
e64f032e0a
a00145be7f
5d2f502703
c75ef22ae5
10154267fc
456c5f3ff6
@@ -1,474 +0,0 @@
From 4a153a1d3b18803a684cd1bcc2cdf3ede3dbae19 Mon Sep 17 00:00:00 2001
From: Victor Stinner <vstinner@python.org>
Date: Fri, 15 Dec 2023 16:10:40 +0100
Subject: [PATCH] [CVE-2023-27043] gh-102988: Reject malformed addresses in
 email.parseaddr() (#111116)

Detect email address parsing errors and return empty tuple to
indicate the parsing error (old API). Add an optional 'strict'
parameter to getaddresses() and parseaddr() functions. Patch by
Thomas Dwyer.

Co-Authored-By: Thomas Dwyer <github@tomd.tel>
---
Doc/library/email.utils.rst | 19 -
Lib/email/utils.py | 151 +++++++-
Lib/test/test_email/test_email.py | 187 +++++++++-
Misc/NEWS.d/next/Library/2023-10-20-15-28-08.gh-issue-102988.dStNO7.rst | 8
4 files changed, 344 insertions(+), 21 deletions(-)
create mode 100644 Misc/NEWS.d/next/Library/2023-10-20-15-28-08.gh-issue-102988.dStNO7.rst

--- a/Doc/library/email.utils.rst
+++ b/Doc/library/email.utils.rst
@@ -58,13 +58,18 @@ of the new API.
begins with angle brackets, they are stripped off.


-.. function:: parseaddr(address)
+.. function:: parseaddr(address, *, strict=True)

Parse address -- which should be the value of some address-containing field such
as :mailheader:`To` or :mailheader:`Cc` -- into its constituent *realname* and
*email address* parts. Returns a tuple of that information, unless the parse
fails, in which case a 2-tuple of ``('', '')`` is returned.

+ If *strict* is true, use a strict parser which rejects malformed inputs.
+
+ .. versionchanged:: 3.13
+ Add *strict* optional parameter and reject malformed inputs by default.
+

.. function:: formataddr(pair, charset='utf-8')

@@ -82,12 +87,15 @@ of the new API.
Added the *charset* option.


-.. function:: getaddresses(fieldvalues)
+.. function:: getaddresses(fieldvalues, *, strict=True)

This method returns a list of 2-tuples of the form returned by ``parseaddr()``.
*fieldvalues* is a sequence of header field values as might be returned by
- :meth:`Message.get_all <email.message.Message.get_all>`. Here's a simple
- example that gets all the recipients of a message::
+ :meth:`Message.get_all <email.message.Message.get_all>`.
+
+ If *strict* is true, use a strict parser which rejects malformed inputs.
+
+ Here's a simple example that gets all the recipients of a message::

from email.utils import getaddresses

@@ -97,6 +105,9 @@ of the new API.
resent_ccs = msg.get_all('resent-cc', [])
all_recipients = getaddresses(tos + ccs + resent_tos + resent_ccs)

+ .. versionchanged:: 3.13
+ Add *strict* optional parameter and reject malformed inputs by default.
+

.. function:: parsedate(date)

--- a/Lib/email/utils.py
+++ b/Lib/email/utils.py
@@ -48,6 +48,7 @@ TICK = "'"
specialsre = re.compile(r'[][\\()<>@,:;".]')
escapesre = re.compile(r'[\\"]')

+
def _has_surrogates(s):
"""Return True if s may contain surrogate-escaped binary data."""
# This check is based on the fact that unless there are surrogates, utf8
@@ -106,12 +107,127 @@ def formataddr(pair, charset='utf-8'):
return address


+def _iter_escaped_chars(addr):
+ pos = 0
+ escape = False
+ for pos, ch in enumerate(addr):
+ if escape:
+ yield (pos, '\\' + ch)
+ escape = False
+ elif ch == '\\':
+ escape = True
+ else:
+ yield (pos, ch)
+ if escape:
+ yield (pos, '\\')
+
+
+def _strip_quoted_realnames(addr):
+ """Strip real names between quotes."""
+ if '"' not in addr:
+ # Fast path
+ return addr
+
+ start = 0
+ open_pos = None
+ result = []
+ for pos, ch in _iter_escaped_chars(addr):
+ if ch == '"':
+ if open_pos is None:
+ open_pos = pos
+ else:
+ if start != open_pos:
+ result.append(addr[start:open_pos])
+ start = pos + 1
+ open_pos = None

-def getaddresses(fieldvalues):
- """Return a list of (REALNAME, EMAIL) for each fieldvalue."""
- all = COMMASPACE.join(str(v) for v in fieldvalues)
- a = _AddressList(all)
- return a.addresslist
+ if start < len(addr):
+ result.append(addr[start:])
+
+ return ''.join(result)
+
+
+supports_strict_parsing = True
+
+def getaddresses(fieldvalues, *, strict=True):
+ """Return a list of (REALNAME, EMAIL) or ('','') for each fieldvalue.
+
+ When parsing fails for a fieldvalue, a 2-tuple of ('', '') is returned in
+ its place.
+
+ If strict is true, use a strict parser which rejects malformed inputs.
+ """
+
+ # If strict is true, if the resulting list of parsed addresses is greater
+ # than the number of fieldvalues in the input list, a parsing error has
+ # occurred and consequently a list containing a single empty 2-tuple [('',
+ # '')] is returned in its place. This is done to avoid invalid output.
+ #
+ # Malformed input: getaddresses(['alice@example.com <bob@example.com>'])
+ # Invalid output: [('', 'alice@example.com'), ('', 'bob@example.com')]
+ # Safe output: [('', '')]
+
+ if not strict:
+ all = COMMASPACE.join(str(v) for v in fieldvalues)
+ a = _AddressList(all)
+ return a.addresslist
+
+ fieldvalues = [str(v) for v in fieldvalues]
+ fieldvalues = _pre_parse_validation(fieldvalues)
+ addr = COMMASPACE.join(fieldvalues)
+ a = _AddressList(addr)
+ result = _post_parse_validation(a.addresslist)
+
+ # Treat output as invalid if the number of addresses is not equal to the
+ # expected number of addresses.
+ n = 0
+ for v in fieldvalues:
+ # When a comma is used in the Real Name part it is not a deliminator.
+ # So strip those out before counting the commas.
+ v = _strip_quoted_realnames(v)
+ # Expected number of addresses: 1 + number of commas
+ n += 1 + v.count(',')
+ if len(result) != n:
+ return [('', '')]
+
+ return result
+
+
+def _check_parenthesis(addr):
+ # Ignore parenthesis in quoted real names.
+ addr = _strip_quoted_realnames(addr)
+
+ opens = 0
+ for pos, ch in _iter_escaped_chars(addr):
+ if ch == '(':
+ opens += 1
+ elif ch == ')':
+ opens -= 1
+ if opens < 0:
+ return False
+ return (opens == 0)
+
+
+def _pre_parse_validation(email_header_fields):
+ accepted_values = []
+ for v in email_header_fields:
+ if not _check_parenthesis(v):
+ v = "('', '')"
+ accepted_values.append(v)
+
+ return accepted_values
+
+
+def _post_parse_validation(parsed_email_header_tuples):
+ accepted_values = []
+ # The parser would have parsed a correctly formatted domain-literal
+ # The existence of an [ after parsing indicates a parsing failure
+ for v in parsed_email_header_tuples:
+ if '[' in v[1]:
+ v = ('', '')
+ accepted_values.append(v)
+
+ return accepted_values


def _format_timetuple_and_zone(timetuple, zone):
@@ -205,16 +321,33 @@ def parsedate_to_datetime(data):
tzinfo=datetime.timezone(datetime.timedelta(seconds=tz)))


-def parseaddr(addr):
+def parseaddr(addr, *, strict=True):
"""
Parse addr into its constituent realname and email address parts.

Return a tuple of realname and email address, unless the parse fails, in
which case return a 2-tuple of ('', '').
+
+ If strict is True, use a strict parser which rejects malformed inputs.
"""
- addrs = _AddressList(addr).addresslist
- if not addrs:
- return '', ''
+ if not strict:
+ addrs = _AddressList(addr).addresslist
+ if not addrs:
+ return ('', '')
+ return addrs[0]
+
+ if isinstance(addr, list):
+ addr = addr[0]
+
+ if not isinstance(addr, str):
+ return ('', '')
+
+ addr = _pre_parse_validation([addr])[0]
+ addrs = _post_parse_validation(_AddressList(addr).addresslist)
+
+ if not addrs or len(addrs) > 1:
+ return ('', '')
+
return addrs[0]


--- a/Lib/test/test_email/test_email.py
+++ b/Lib/test/test_email/test_email.py
@@ -16,6 +16,7 @@ from unittest.mock import patch

import email
import email.policy
+import email.utils

from email.charset import Charset
from email.generator import Generator, DecodedGenerator, BytesGenerator
@@ -3352,15 +3353,137 @@ Foo
],
)

+ def test_parsing_errors(self):
+ """Test for parsing errors from CVE-2023-27043 and CVE-2019-16056"""
+ alice = 'alice@example.org'
+ bob = 'bob@example.com'
+ empty = ('', '')
+
+ # Test utils.getaddresses() and utils.parseaddr() on malformed email
+ # addresses: default behavior (strict=True) rejects malformed address,
+ # and strict=False which tolerates malformed address.
+ for invalid_separator, expected_non_strict in (
+ ('(', [(f'<{bob}>', alice)]),
+ (')', [('', alice), empty, ('', bob)]),
+ ('<', [('', alice), empty, ('', bob), empty]),
+ ('>', [('', alice), empty, ('', bob)]),
+ ('[', [('', f'{alice}[<{bob}>]')]),
+ (']', [('', alice), empty, ('', bob)]),
+ ('@', [empty, empty, ('', bob)]),
+ (';', [('', alice), empty, ('', bob)]),
+ (':', [('', alice), ('', bob)]),
+ ('.', [('', alice + '.'), ('', bob)]),
+ ('"', [('', alice), ('', f'<{bob}>')]),
+ ):
+ address = f'{alice}{invalid_separator}<{bob}>'
+ with self.subTest(address=address):
+ self.assertEqual(utils.getaddresses([address]),
+ [empty])
+ self.assertEqual(utils.getaddresses([address], strict=False),
+ expected_non_strict)
+
+ self.assertEqual(utils.parseaddr([address]),
+ empty)
+ self.assertEqual(utils.parseaddr([address], strict=False),
+ ('', address))
+
+ # Comma (',') is treated differently depending on strict parameter.
+ # Comma without quotes.
+ address = f'{alice},<{bob}>'
+ self.assertEqual(utils.getaddresses([address]),
+ [('', alice), ('', bob)])
+ self.assertEqual(utils.getaddresses([address], strict=False),
+ [('', alice), ('', bob)])
+ self.assertEqual(utils.parseaddr([address]),
+ empty)
+ self.assertEqual(utils.parseaddr([address], strict=False),
+ ('', address))
+
+ # Real name between quotes containing comma.
+ address = '"Alice, alice@example.org" <bob@example.com>'
+ expected_strict = ('Alice, alice@example.org', 'bob@example.com')
+ self.assertEqual(utils.getaddresses([address]), [expected_strict])
+ self.assertEqual(utils.getaddresses([address], strict=False), [expected_strict])
+ self.assertEqual(utils.parseaddr([address]), expected_strict)
+ self.assertEqual(utils.parseaddr([address], strict=False),
+ ('', address))
+
+ # Valid parenthesis in comments.
+ address = 'alice@example.org (Alice)'
+ expected_strict = ('Alice', 'alice@example.org')
+ self.assertEqual(utils.getaddresses([address]), [expected_strict])
+ self.assertEqual(utils.getaddresses([address], strict=False), [expected_strict])
+ self.assertEqual(utils.parseaddr([address]), expected_strict)
+ self.assertEqual(utils.parseaddr([address], strict=False),
+ ('', address))
+
+ # Invalid parenthesis in comments.
+ address = 'alice@example.org )Alice('
+ self.assertEqual(utils.getaddresses([address]), [empty])
+ self.assertEqual(utils.getaddresses([address], strict=False),
+ [('', 'alice@example.org'), ('', ''), ('', 'Alice')])
+ self.assertEqual(utils.parseaddr([address]), empty)
+ self.assertEqual(utils.parseaddr([address], strict=False),
+ ('', address))
+
+ # Two addresses with quotes separated by comma.
+ address = '"Jane Doe" <jane@example.net>, "John Doe" <john@example.net>'
+ self.assertEqual(utils.getaddresses([address]),
+ [('Jane Doe', 'jane@example.net'),
+ ('John Doe', 'john@example.net')])
+ self.assertEqual(utils.getaddresses([address], strict=False),
+ [('Jane Doe', 'jane@example.net'),
+ ('John Doe', 'john@example.net')])
+ self.assertEqual(utils.parseaddr([address]), empty)
+ self.assertEqual(utils.parseaddr([address], strict=False),
+ ('', address))
+
+ # Test email.utils.supports_strict_parsing attribute
+ self.assertEqual(email.utils.supports_strict_parsing, True)
+
def test_getaddresses_nasty(self):
- eq = self.assertEqual
- eq(utils.getaddresses(['foo: ;']), [('', '')])
- eq(utils.getaddresses(
- ['[]*-- =~$']),
- [('', ''), ('', ''), ('', '*--')])
- eq(utils.getaddresses(
- ['foo: ;', '"Jason R. Mastaler" <jason@dom.ain>']),
- [('', ''), ('Jason R. Mastaler', 'jason@dom.ain')])
+ for addresses, expected in (
+ (['"Sürname, Firstname" <to@example.com>'],
+ [('Sürname, Firstname', 'to@example.com')]),
+
+ (['foo: ;'],
+ [('', '')]),
+
+ (['foo: ;', '"Jason R. Mastaler" <jason@dom.ain>'],
+ [('', ''), ('Jason R. Mastaler', 'jason@dom.ain')]),
+
+ ([r'Pete(A nice \) chap) <pete(his account)@silly.test(his host)>'],
+ [('Pete (A nice ) chap his account his host)', 'pete@silly.test')]),
+
+ (['(Empty list)(start)Undisclosed recipients :(nobody(I know))'],
+ [('', '')]),
+
+ (['Mary <@machine.tld:mary@example.net>, , jdoe@test . example'],
+ [('Mary', 'mary@example.net'), ('', ''), ('', 'jdoe@test.example')]),
+
+ (['John Doe <jdoe@machine(comment). example>'],
+ [('John Doe (comment)', 'jdoe@machine.example')]),
+
+ (['"Mary Smith: Personal Account" <smith@home.example>'],
+ [('Mary Smith: Personal Account', 'smith@home.example')]),
+
+ (['Undisclosed recipients:;'],
+ [('', '')]),
+
+ ([r'<boss@nil.test>, "Giant; \"Big\" Box" <bob@example.net>'],
+ [('', 'boss@nil.test'), ('Giant; "Big" Box', 'bob@example.net')]),
+ ):
+ with self.subTest(addresses=addresses):
+ self.assertEqual(utils.getaddresses(addresses),
+ expected)
+ self.assertEqual(utils.getaddresses(addresses, strict=False),
+ expected)
+
+ addresses = ['[]*-- =~$']
+ self.assertEqual(utils.getaddresses(addresses),
+ [('', '')])
+ self.assertEqual(utils.getaddresses(addresses, strict=False),
+ [('', ''), ('', ''), ('', '*--')])

def test_getaddresses_embedded_comment(self):
"""Test proper handling of a nested comment"""
@@ -3551,6 +3674,54 @@ multipart/report
m = cls(*constructor, policy=email.policy.default)
self.assertIs(m.policy, email.policy.default)

+ def test_iter_escaped_chars(self):
+ self.assertEqual(list(utils._iter_escaped_chars(r'a\\b\"c\\"d')),
+ [(0, 'a'),
+ (2, '\\\\'),
+ (3, 'b'),
+ (5, '\\"'),
+ (6, 'c'),
+ (8, '\\\\'),
+ (9, '"'),
+ (10, 'd')])
+ self.assertEqual(list(utils._iter_escaped_chars('a\\')),
+ [(0, 'a'), (1, '\\')])
+
+ def test_strip_quoted_realnames(self):
+ def check(addr, expected):
+ self.assertEqual(utils._strip_quoted_realnames(addr), expected)
+
+ check('"Jane Doe" <jane@example.net>, "John Doe" <john@example.net>',
+ ' <jane@example.net>, <john@example.net>')
+ check(r'"Jane \"Doe\"." <jane@example.net>',
+ ' <jane@example.net>')
+
+ # special cases
+ check(r'before"name"after', 'beforeafter')
+ check(r'before"name"', 'before')
+ check(r'b"name"', 'b') # single char
+ check(r'"name"after', 'after')
+ check(r'"name"a', 'a') # single char
+ check(r'"name"', '')
+
+ # no change
+ for addr in (
+ 'Jane Doe <jane@example.net>, John Doe <john@example.net>',
+ 'lone " quote',
+ ):
+ self.assertEqual(utils._strip_quoted_realnames(addr), addr)
+
+
+ def test_check_parenthesis(self):
+ addr = 'alice@example.net'
+ self.assertTrue(utils._check_parenthesis(f'{addr} (Alice)'))
+ self.assertFalse(utils._check_parenthesis(f'{addr} )Alice('))
+ self.assertFalse(utils._check_parenthesis(f'{addr} (Alice))'))
+ self.assertFalse(utils._check_parenthesis(f'{addr} ((Alice)'))
+
+ # Ignore real name between quotes
+ self.assertTrue(utils._check_parenthesis(f'")Alice((" {addr}'))
+

# Test the iterator/generators
class TestIterators(TestEmailBase):
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2023-10-20-15-28-08.gh-issue-102988.dStNO7.rst
@@ -0,0 +1,8 @@
+:func:`email.utils.getaddresses` and :func:`email.utils.parseaddr` now
+return ``('', '')`` 2-tuples in more situations where invalid email
+addresses are encountered instead of potentially inaccurate values. Add
+optional *strict* parameter to these two functions: use ``strict=False`` to
+get the old behavior, accept malformed inputs.
+``getattr(email.utils, 'supports_strict_parsing', False)`` can be use to check
+if the *strict* paramater is available. Patch by Thomas Dwyer and Victor
+Stinner to improve the CVE-2023-27043 fix.
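A minimal sketch of how the *strict* parameter introduced by the patch above behaves on a build that carries the fix; the malformed header value is only an illustration, and the non-strict output is intentionally not asserted here.

```python
from email import utils

malformed = 'alice@example.org<bob@example.com>'  # two addresses glued together

if getattr(utils, 'supports_strict_parsing', False):
    # Default (strict=True): parsing errors collapse to ('', '').
    print(utils.getaddresses([malformed]))   # [('', '')]
    print(utils.parseaddr(malformed))        # ('', '')
    # The old, tolerant behavior is still available explicitly.
    print(utils.getaddresses([malformed], strict=False))
else:
    # Older interpreter without the patch: only the legacy API exists.
    print(utils.getaddresses([malformed]))
```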
@@ -1,67 +0,0 @@
Index: Python-3.12.3/Lib/test/test_xml_etree.py
===================================================================
--- Python-3.12.3.orig/Lib/test/test_xml_etree.py
+++ Python-3.12.3/Lib/test/test_xml_etree.py
@@ -121,6 +121,11 @@ ATTLIST_XML = """\
</foo>
"""

+IS_SLE_15_6 = os.environ.get("SLE_VERSION", "") == "0150600"
+fails_with_expat_2_6_0 = (unittest.expectedFailure
+ # 2.4 version patched in SLE
+ if IS_SLE_15_6 and pyexpat.version_info >= (2, 4, 0) else
+ lambda test: test)
def checkwarnings(*filters, quiet=False):
def decorator(test):
def newtest(*args, **kwargs):
@@ -1424,9 +1429,11 @@ class XMLPullParserTest(unittest.TestCas
self.assert_event_tags(parser, [('end', 'root')])
self.assertIsNone(parser.close())

+ @fails_with_expat_2_6_0
def test_simple_xml_chunk_1(self):
self.test_simple_xml(chunk_size=1, flush=True)

+ @fails_with_expat_2_6_0
def test_simple_xml_chunk_5(self):
self.test_simple_xml(chunk_size=5, flush=True)

@@ -1651,6 +1658,9 @@ class XMLPullParserTest(unittest.TestCas

self.assert_event_tags(parser, [('end', 'doc')])

+ @unittest.skipIf(pyexpat.version_info < (2, 6, 0),
+ f'Expat {pyexpat.version_info} does not '
+ 'support reparse deferral')
def test_flush_reparse_deferral_disabled(self):
parser = ET.XMLPullParser(events=('start', 'end'))

Index: Python-3.12.3/Lib/test/test_sax.py
===================================================================
--- Python-3.12.3.orig/Lib/test/test_sax.py
+++ Python-3.12.3/Lib/test/test_sax.py
@@ -1240,6 +1240,9 @@ class ExpatReaderTest(XmlTestBase):

self.assertEqual(result.getvalue(), start + b"<doc></doc>")

+ @unittest.skipIf(pyexpat.version_info < (2, 6, 0),
+ f'Expat {pyexpat.version_info} does not '
+ 'support reparse deferral')
def test_flush_reparse_deferral_disabled(self):
result = BytesIO()
xmlgen = XMLGenerator(result)
Index: Python-3.12.3/Lib/test/test_pyexpat.py
===================================================================
--- Python-3.12.3.orig/Lib/test/test_pyexpat.py
+++ Python-3.12.3/Lib/test/test_pyexpat.py
@@ -794,6 +794,10 @@ class ReparseDeferralTest(unittest.TestC
self.assertEqual(started, ['doc'])

def test_reparse_deferral_disabled(self):
+ if expat.version_info < (2, 6, 0):
+ self.skipTest(f'Expat {expat.version_info} does not '
+ 'support reparse deferral')
+
started = []

def start_element(name, _):
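The patch above gates the reparse-deferral tests on the Expat version the interpreter was built against; this small check shows where that version information comes from at runtime.

```python
# Inspect the linked Expat version; reparse deferral needs Expat >= 2.6.0.
from xml.parsers import expat

print(expat.version_info)      # e.g. (2, 6, 2)
print(expat.EXPAT_VERSION)     # e.g. 'expat_2.6.2'

if expat.version_info < (2, 6, 0):
    print("reparse deferral not supported; related tests are skipped")
```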
@@ -1,171 +0,0 @@
---
Lib/tempfile.py | 16 +
Lib/test/test_tempfile.py | 113 ++++++++++
Misc/NEWS.d/next/Library/2022-12-01-16-57-44.gh-issue-91133.LKMVCV.rst | 2
3 files changed, 131 insertions(+)

Index: Python-3.12.4/Lib/tempfile.py
===================================================================
--- Python-3.12.4.orig/Lib/tempfile.py
+++ Python-3.12.4/Lib/tempfile.py
@@ -285,6 +285,22 @@ def _resetperms(path):
_dont_follow_symlinks(chflags, path, 0)
_dont_follow_symlinks(_os.chmod, path, 0o700)

+def _dont_follow_symlinks(func, path, *args):
+ # Pass follow_symlinks=False, unless not supported on this platform.
+ if func in _os.supports_follow_symlinks:
+ func(path, *args, follow_symlinks=False)
+ elif _os.name == 'nt' or not _os.path.islink(path):
+ func(path, *args)
+
+def _resetperms(path):
+ try:
+ chflags = _os.chflags
+ except AttributeError:
+ pass
+ else:
+ _dont_follow_symlinks(chflags, path, 0)
+ _dont_follow_symlinks(_os.chmod, path, 0o700)
+

# User visible interfaces.

Index: Python-3.12.4/Lib/test/test_tempfile.py
===================================================================
--- Python-3.12.4.orig/Lib/test/test_tempfile.py
+++ Python-3.12.4/Lib/test/test_tempfile.py
@@ -1803,6 +1803,103 @@ class TestTemporaryDirectory(BaseTestCas
new_flags = os.stat(dir1).st_flags
self.assertEqual(new_flags, old_flags)

+ @os_helper.skip_unless_symlink
+ def test_cleanup_with_symlink_modes(self):
+ # cleanup() should not follow symlinks when fixing mode bits (#91133)
+ with self.do_create(recurse=0) as d2:
+ file1 = os.path.join(d2, 'file1')
+ open(file1, 'wb').close()
+ dir1 = os.path.join(d2, 'dir1')
+ os.mkdir(dir1)
+ for mode in range(8):
+ mode <<= 6
+ with self.subTest(mode=format(mode, '03o')):
+ def test(target, target_is_directory):
+ d1 = self.do_create(recurse=0)
+ symlink = os.path.join(d1.name, 'symlink')
+ os.symlink(target, symlink,
+ target_is_directory=target_is_directory)
+ try:
+ os.chmod(symlink, mode, follow_symlinks=False)
+ except NotImplementedError:
+ pass
+ try:
+ os.chmod(symlink, mode)
+ except FileNotFoundError:
+ pass
+ os.chmod(d1.name, mode)
+ d1.cleanup()
+ self.assertFalse(os.path.exists(d1.name))
+
+ with self.subTest('nonexisting file'):
+ test('nonexisting', target_is_directory=False)
+ with self.subTest('nonexisting dir'):
+ test('nonexisting', target_is_directory=True)
+
+ with self.subTest('existing file'):
+ os.chmod(file1, mode)
+ old_mode = os.stat(file1).st_mode
+ test(file1, target_is_directory=False)
+ new_mode = os.stat(file1).st_mode
+ self.assertEqual(new_mode, old_mode,
+ '%03o != %03o' % (new_mode, old_mode))
+
+ with self.subTest('existing dir'):
+ os.chmod(dir1, mode)
+ old_mode = os.stat(dir1).st_mode
+ test(dir1, target_is_directory=True)
+ new_mode = os.stat(dir1).st_mode
+ self.assertEqual(new_mode, old_mode,
+ '%03o != %03o' % (new_mode, old_mode))
+
+ @unittest.skipUnless(hasattr(os, 'chflags'), 'requires os.chflags')
+ @os_helper.skip_unless_symlink
+ def test_cleanup_with_symlink_flags(self):
+ # cleanup() should not follow symlinks when fixing flags (#91133)
+ flags = stat.UF_IMMUTABLE | stat.UF_NOUNLINK
+ self.check_flags(flags)
+
+ with self.do_create(recurse=0) as d2:
+ file1 = os.path.join(d2, 'file1')
+ open(file1, 'wb').close()
+ dir1 = os.path.join(d2, 'dir1')
+ os.mkdir(dir1)
+ def test(target, target_is_directory):
+ d1 = self.do_create(recurse=0)
+ symlink = os.path.join(d1.name, 'symlink')
+ os.symlink(target, symlink,
+ target_is_directory=target_is_directory)
+ try:
+ os.chflags(symlink, flags, follow_symlinks=False)
+ except NotImplementedError:
+ pass
+ try:
+ os.chflags(symlink, flags)
+ except FileNotFoundError:
+ pass
+ os.chflags(d1.name, flags)
+ d1.cleanup()
+ self.assertFalse(os.path.exists(d1.name))
+
+ with self.subTest('nonexisting file'):
+ test('nonexisting', target_is_directory=False)
+ with self.subTest('nonexisting dir'):
+ test('nonexisting', target_is_directory=True)
+
+ with self.subTest('existing file'):
+ os.chflags(file1, flags)
+ old_flags = os.stat(file1).st_flags
+ test(file1, target_is_directory=False)
+ new_flags = os.stat(file1).st_flags
+ self.assertEqual(new_flags, old_flags)
+
+ with self.subTest('existing dir'):
+ os.chflags(dir1, flags)
+ old_flags = os.stat(dir1).st_flags
+ test(dir1, target_is_directory=True)
+ new_flags = os.stat(dir1).st_flags
+ self.assertEqual(new_flags, old_flags)
+
@support.cpython_only
def test_del_on_collection(self):
# A TemporaryDirectory is deleted when garbage collected
@@ -1977,6 +2074,22 @@ class TestTemporaryDirectory(BaseTestCas

def check_flags(self, flags):
# skip the test if these flags are not supported (ex: FreeBSD 13)
+ filename = os_helper.TESTFN
+ try:
+ open(filename, "w").close()
+ try:
+ os.chflags(filename, flags)
+ except OSError as exc:
+ # "OSError: [Errno 45] Operation not supported"
+ self.skipTest(f"chflags() doesn't support flags "
+ f"{flags:#b}: {exc}")
+ else:
+ os.chflags(filename, 0)
+ finally:
+ os_helper.unlink(filename)
+
+ def check_flags(self, flags):
+ # skip the test if these flags are not supported (ex: FreeBSD 13)
filename = os_helper.TESTFN
try:
open(filename, "w").close()
Index: Python-3.12.4/Misc/NEWS.d/next/Library/2022-12-01-16-57-44.gh-issue-91133.LKMVCV.rst
===================================================================
--- /dev/null
+++ Python-3.12.4/Misc/NEWS.d/next/Library/2022-12-01-16-57-44.gh-issue-91133.LKMVCV.rst
@@ -0,0 +1,2 @@
+Fix a bug in :class:`tempfile.TemporaryDirectory` cleanup, which now no longer
+dereferences symlinks when working around file system permission errors.
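A small standalone sketch of the pattern the tempfile fix above relies on: only pass `follow_symlinks=False` when `os.chmod` supports it on the current platform, so permission resets never dereference a symlink.

```python
import os

def chmod_no_follow(path, mode):
    # Mirror of the helper added by the patch, for illustration only.
    if os.chmod in os.supports_follow_symlinks:
        os.chmod(path, mode, follow_symlinks=False)
    elif os.name == 'nt' or not os.path.islink(path):
        os.chmod(path, mode)
```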
@@ -1,148 +0,0 @@
---
Lib/test/test_zipfile/_path/test_path.py | 78 ++++++++++
Lib/zipfile/_path/__init__.py | 18 ++
Misc/NEWS.d/next/Library/2024-08-11-14-08-04.gh-issue-122905.7tDsxA.rst | 1
Misc/NEWS.d/next/Library/2024-08-26-13-45-20.gh-issue-123270.gXHvNJ.rst | 3
4 files changed, 98 insertions(+), 2 deletions(-)

--- a/Lib/test/test_zipfile/_path/test_path.py
+++ b/Lib/test/test_zipfile/_path/test_path.py
@@ -4,6 +4,7 @@ import contextlib
import pathlib
import pickle
import sys
+import time
import unittest
import zipfile

@@ -577,3 +578,80 @@ class TestPath(unittest.TestCase):
zipfile.Path(alpharep)
with self.assertRaises(KeyError):
alpharep.getinfo('does-not-exist')
+
+ def test_malformed_paths(self):
+ """
+ Path should handle malformed paths gracefully.
+
+ Paths with leading slashes are not visible.
+
+ Paths with dots are treated like regular files.
+ """
+ data = io.BytesIO()
+ zf = zipfile.ZipFile(data, "w")
+ zf.writestr("/one-slash.txt", b"content")
+ zf.writestr("//two-slash.txt", b"content")
+ zf.writestr("../parent.txt", b"content")
+ zf.filename = ''
+ root = zipfile.Path(zf)
+ assert list(map(str, root.iterdir())) == ['../']
+ assert root.joinpath('..').joinpath('parent.txt').read_bytes() == b'content'
+
+ def test_unsupported_names(self):
+ """
+ Path segments with special characters are readable.
+
+ On some platforms or file systems, characters like
+ ``:`` and ``?`` are not allowed, but they are valid
+ in the zip file.
+ """
+ data = io.BytesIO()
+ zf = zipfile.ZipFile(data, "w")
+ zf.writestr("path?", b"content")
+ zf.writestr("V: NMS.flac", b"fLaC...")
+ zf.filename = ''
+ root = zipfile.Path(zf)
+ contents = root.iterdir()
+ assert next(contents).name == 'path?'
+ assert next(contents).name == 'V: NMS.flac'
+ assert root.joinpath('V: NMS.flac').read_bytes() == b"fLaC..."
+
+ def test_backslash_not_separator(self):
+ """
+ In a zip file, backslashes are not separators.
+ """
+ data = io.BytesIO()
+ zf = zipfile.ZipFile(data, "w")
+ zf.writestr(DirtyZipInfo.for_name("foo\\bar", zf), b"content")
+ zf.filename = ''
+ root = zipfile.Path(zf)
+ (first,) = root.iterdir()
+ assert not first.is_dir()
+ assert first.name == 'foo\\bar'
+
+
+class DirtyZipInfo(zipfile.ZipInfo):
+ """
+ Bypass name sanitization.
+ """
+
+ def __init__(self, filename, *args, **kwargs):
+ super().__init__(filename, *args, **kwargs)
+ self.filename = filename
+
+ @classmethod
+ def for_name(cls, name, archive):
+ """
+ Construct the same way that ZipFile.writestr does.
+
+ TODO: extract this functionality and re-use
+ """
+ self = cls(filename=name, date_time=time.localtime(time.time())[:6])
+ self.compress_type = archive.compression
+ self.compress_level = archive.compresslevel
+ if self.filename.endswith('/'): # pragma: no cover
+ self.external_attr = 0o40775 << 16 # drwxrwxr-x
+ self.external_attr |= 0x10 # MS-DOS directory flag
+ else:
+ self.external_attr = 0o600 << 16 # ?rw-------
+ return self
--- a/Lib/zipfile/_path/__init__.py
+++ b/Lib/zipfile/_path/__init__.py
@@ -1,3 +1,12 @@
+"""
+A Path-like interface for zipfiles.
+
+This codebase is shared between zipfile.Path in the stdlib
+and zipp in PyPI. See
+https://github.com/python/importlib_metadata/wiki/Development-Methodology
+for more detail.
+"""
+
import io
import posixpath
import zipfile
@@ -34,7 +43,7 @@ def _parents(path):
def _ancestry(path):
"""
Given a path with elements separated by
- posixpath.sep, generate all elements of that path
+ posixpath.sep, generate all elements of that path.

>>> list(_ancestry('b/d'))
['b/d', 'b']
@@ -46,9 +55,14 @@ def _ancestry(path):
['b']
>>> list(_ancestry(''))
[]
+
+ Multiple separators are treated like a single.
+
+ >>> list(_ancestry('//b//d///f//'))
+ ['//b//d///f', '//b//d', '//b']
"""
path = path.rstrip(posixpath.sep)
- while path and path != posixpath.sep:
+ while path.rstrip(posixpath.sep):
yield path
path, tail = posixpath.split(path)

--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2024-08-11-14-08-04.gh-issue-122905.7tDsxA.rst
@@ -0,0 +1 @@
+:class:`zipfile.Path` objects now sanitize names from the zipfile.
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2024-08-26-13-45-20.gh-issue-123270.gXHvNJ.rst
@@ -0,0 +1,3 @@
+Applied a more surgical fix for malformed payloads in :class:`zipfile.Path`
+causing infinite loops (gh-122905) without breaking contents using
+legitimate characters.
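For context, a short illustration of the `zipfile.Path` behavior the tests above exercise: entry names are read from the archive as stored, and unusual characters stay visible.

```python
import io
import zipfile

data = io.BytesIO()
with zipfile.ZipFile(data, "w") as zf:
    zf.writestr("folder/file.txt", b"content")
    zf.writestr("V: NMS.flac", b"fLaC...")

root = zipfile.Path(zipfile.ZipFile(data))
for entry in root.iterdir():
    print(entry.name)                    # 'folder', 'V: NMS.flac'
print(root.joinpath("V: NMS.flac").read_bytes())
```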
@@ -24,16 +24,15 @@ Co-authored-by: Miro Hrončok <miro@hroncok.cz>
Co-authored-by: Michal Cyprian <m.cyprian@gmail.com>
Co-authored-by: Lumír Balhar <frenzy.madness@gmail.com>
---
Lib/site.py | 9 ++++++-
Lib/sysconfig.py | 51 ++++++++++++++++++++++++++++++++++++++++++++-
Lib/sysconfig.py | 49 +++++++++++++++++++++++++++++++-
Lib/test/test_sysconfig.py | 17 +++++++++++++--
Lib/test/test_sysconfig.py | 17 +++++++++++--
2 files changed, 65 insertions(+), 3 deletions(-)
3 files changed, 71 insertions(+), 4 deletions(-)

Index: Python-3.12.4/Lib/sysconfig.py
Index: Python-3.12.10/Lib/sysconfig.py
===================================================================
--- Python-3.12.4.orig/Lib/sysconfig.py
--- Python-3.12.10.orig/Lib/sysconfig.py 2025-04-11 21:04:43.494305425 +0200
+++ Python-3.12.4/Lib/sysconfig.py
+++ Python-3.12.10/Lib/sysconfig.py 2025-04-11 21:04:51.517931810 +0200
@@ -104,6 +104,11 @@ if os.name == 'nt':
@@ -104,6 +104,11 @@
else:
_INSTALL_SCHEMES['venv'] = _INSTALL_SCHEMES['posix_venv']

@@ -45,7 +44,7 @@ Index: Python-3.12.4/Lib/sysconfig.py

# NOTE: site.py has copy of this function.
# Sync it when modify this function.
@@ -163,6 +168,19 @@ if _HAS_USER_BASE:
@@ -163,13 +168,28 @@
},
}

@@ -65,7 +64,16 @@ Index: Python-3.12.4/Lib/sysconfig.py
_SCHEME_KEYS = ('stdlib', 'platstdlib', 'purelib', 'platlib', 'include',
'scripts', 'data')

@@ -263,11 +281,40 @@ def _extend_dict(target_dict, other_dict
_PY_VERSION = sys.version.split()[0]
_PY_VERSION_SHORT = f'{sys.version_info[0]}.{sys.version_info[1]}'
_PY_VERSION_SHORT_NO_DOT = f'{sys.version_info[0]}{sys.version_info[1]}'
+_PREFIX = os.path.normpath(sys.prefix)
_BASE_PREFIX = os.path.normpath(sys.base_prefix)
+_EXEC_PREFIX = os.path.normpath(sys.exec_prefix)
_BASE_EXEC_PREFIX = os.path.normpath(sys.base_exec_prefix)
# Mutex guarding initialization of _CONFIG_VARS.
_CONFIG_VARS_LOCK = threading.RLock()
@@ -268,11 +288,40 @@
target_dict[key] = value


@@ -107,11 +115,11 @@ Index: Python-3.12.4/Lib/sysconfig.py
if os.name == 'nt':
# On Windows we want to substitute 'lib' for schemes rather
# than the native value (without modifying vars, in case it
Index: Python-3.12.4/Lib/test/test_sysconfig.py
Index: Python-3.12.10/Lib/test/test_sysconfig.py
===================================================================
--- Python-3.12.4.orig/Lib/test/test_sysconfig.py
--- Python-3.12.10.orig/Lib/test/test_sysconfig.py 2025-04-11 21:04:45.175417431 +0200
+++ Python-3.12.4/Lib/test/test_sysconfig.py
+++ Python-3.12.10/Lib/test/test_sysconfig.py 2025-04-11 21:04:51.518393464 +0200
@@ -110,8 +110,19 @@ class TestSysConfig(unittest.TestCase):
@@ -119,8 +119,19 @@
for scheme in _INSTALL_SCHEMES:
for name in _INSTALL_SCHEMES[scheme]:
expected = _INSTALL_SCHEMES[scheme][name].format(**config_vars)
@@ -132,7 +140,7 @@ Index: Python-3.12.4/Lib/test/test_sysconfig.py
os.path.normpath(expected),
)

@@ -344,7 +355,7 @@ class TestSysConfig(unittest.TestCase):
@@ -353,7 +364,7 @@
self.assertTrue(os.path.isfile(config_h), config_h)

def test_get_scheme_names(self):
@@ -141,7 +149,7 @@ Index: Python-3.12.4/Lib/test/test_sysconfig.py
if HAS_USER_BASE:
wanted.extend(['nt_user', 'osx_framework_user', 'posix_user'])
self.assertEqual(get_scheme_names(), tuple(sorted(wanted)))
@@ -356,6 +367,8 @@ class TestSysConfig(unittest.TestCase):
@@ -365,6 +376,8 @@
cmd = "-c", "import sysconfig; print(sysconfig.get_platform())"
self.assertEqual(py.call_real(*cmd), py.call_link(*cmd))
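The rebased patch above extends the install schemes that `sysconfig` knows about; the snippet below just inspects what the running interpreter exposes (the exact scheme names and paths vary by distribution and build).

```python
import sysconfig

print(sysconfig.get_scheme_names())      # all registered install schemes
print(sysconfig.get_default_scheme())    # scheme used by default
print(sysconfig.get_paths()["purelib"])  # e.g. site-packages for this scheme
```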
3
Python-3.12.11.tar.xz
Normal file
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:c30bb24b7f1e9a19b11b55a546434f74e739bb4c271a3e3a80ff4380d49f7adb
size 20525812
18
Python-3.12.11.tar.xz.asc
Normal file
@@ -0,0 +1,18 @@
-----BEGIN PGP SIGNATURE-----

iQKTBAABCgB9FiEEcWlgX2LHUTVtBUomqCHmgOX6YwUFAmg/MbpfFIAAAAAALgAo
aXNzdWVyLWZwckBub3RhdGlvbnMub3BlbnBncC5maWZ0aGhvcnNlbWFuLm5ldDcx
Njk2MDVGNjJDNzUxMzU2RDA1NEEyNkE4MjFFNjgwRTVGQTYzMDUACgkQqCHmgOX6
YwXySQ/7Ba9qlnTLmxqTCO8C7Gf545WNMBL2Ep6JZPgjOcgNk9e1QdAnNV5OOtGm
gW5nNPSTNNcIcPn058GuI24D4RpTQCJfMbMLsfYgvio0E7ij1gC19PsJHb6ejtCS
H2kK237Y1kuqRUdbTZssFDoAR4R9+UCaDuo4XdW+UKQk2GgdNQDMWLKmWF/Xk6Ob
/LihMXj27mDU9nXVdWR55sJzTFzfGB015vmORvcpuctkf1lZ4AfVFMgGw1CgjRjF
kjrOkrDErjDUQ8BIhMh90deiTpigfg7cg1HBDI6GRzklFg6cMfIdfvmfM0MfamX3
Tow08TGBzmYXWgrqjYXW6JknKhBGOrjXMB7/yNDk9bJVLcOJaLbOmbcG0WRQF/Py
DMOCvr09l0yt5KFYpdKrDvyCuKYfpX33B4C60kU9JzmfXGyQ6LDTPXapZooJ+8Fg
GRTUsc0YWXoaDVCcxMIdiG+jEMQkjWVwW7E/nC/d7WT5L9KPoYFA1sZ834kKq3jr
NmZynbBnKH7m7L+u6HP6B+pa84FKEME69osAXZk0HJOIHB+SOX3E6BXRo6IV8Q/K
J6f5Ja26gJ7KXcUxTgkTkYh7tz0bhb+WeL3j6N/BC0eK7ZVsKRZ/3WnntGsG5B2m
FjVOYKolfkF4tf63SjdFuudgaKGCaDK1PvfwIr7k0oozxrB2ZEA=
=SYH/
-----END PGP SIGNATURE-----
1
Python-3.12.11.tar.xz.sigstore
Normal file
File diff suppressed because one or more lines are too long
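One way to verify a downloaded Python-3.12.11.tar.xz against the sha256 recorded in the Git LFS pointer above; the local file path is an assumption.

```python
import hashlib

expected = "c30bb24b7f1e9a19b11b55a546434f74e739bb4c271a3e3a80ff4380d49f7adb"

h = hashlib.sha256()
with open("Python-3.12.11.tar.xz", "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)
print("OK" if h.hexdigest() == expected else "checksum mismatch")
```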
@@ -1,3 +0,0 @@
version https://git-lfs.github.com/spec/v1
oid sha256:fa8a2e12c5e620b09f53e65bcd87550d2e5a1e2e04bf8ba991dcc55113876397
size 20422396
@@ -1,18 +0,0 @@
-----BEGIN PGP SIGNATURE-----

iQKTBAABCgB9FiEEcWlgX2LHUTVtBUomqCHmgOX6YwUFAmayiFtfFIAAAAAALgAo
aXNzdWVyLWZwckBub3RhdGlvbnMub3BlbnBncC5maWZ0aGhvcnNlbWFuLm5ldDcx
Njk2MDVGNjJDNzUxMzU2RDA1NEEyNkE4MjFFNjgwRTVGQTYzMDUACgkQqCHmgOX6
YwUr4g//VyVs9tvbtiSp8pGe8f1gYErEw54r124sL/CBuNii8Irts1j5ymGxcm+l
hshPK5UlqRnhd5dCJWFTvLTXa5Ko2R1L3JyyxfGd1hmDuMhrWsDHijI0R7L/mGM5
6X2LTaadBVNvk8HaNKvR8SEWvo68rdnOuYElFA9ir7uqwjO26ZWz9FfH80YDGwo8
Blef2NYw8rNhiaZMFV0HYV7D+YyUAZnFNfW8M7Fd4oskUyj1tD9J89T9FFLYN09d
BcCIf+EdiEfqRpKxH89bW2g52kDrm4jYGONtpyF8eruyS3YwYSbvbuWioBYKmlxC
s51mieXz6G325GTZnmPxLek3ywPv6Gil9y0wH3fIr2BsWsmXust4LBpjDGt56Fy6
seokGBg8xzsBSk3iEqNoFmNsy/QOiuCcDejX4XqBDNodOlETQPJb07TkTI2iOmg9
NG4Atiz1HvGVxK68UuK9IIcNHyaWUmH8h4VQFGvc6KV6feP5Nm21Y12PZ5XIqJBO
Y8M/VJIJ5koaNPQfnBbbI5YBkUr4BVpIXIpY5LM/L5sUo2C3R7hMi0VGK88HGfSQ
KV4JmZgf6RMBNmrWY12sryS1QQ6q3P110GTUGQWB3sxxNbhmfcrK+4viqHc83yDz
ifmk33HuqaQGU7OzUMHeNcoCJIPo3H1FpoHOn9wLLCtA1pT+as4=
=t0Rk
-----END PGP SIGNATURE-----
@@ -5,30 +5,32 @@ Subject: [PATCH] bpo-31046: ensurepip does not honour the value of $(prefix)

Co-Authored-By: Xavier de Gaye <xdegaye@gmail.com>
---
Doc/library/ensurepip.rst | 9 +++--
Doc/library/ensurepip.rst | 12 +++++-
Lib/ensurepip/__init__.py | 18 +++++++---
Lib/test/test_ensurepip.py | 11 ++++++
Makefile.pre.in | 4 +-
Misc/NEWS.d/next/Build/2019-12-16-17-50-42.bpo-31046.XA-Qfr.rst | 1
5 files changed, 34 insertions(+), 9 deletions(-)
5 files changed, 37 insertions(+), 9 deletions(-)
create mode 100644 Misc/NEWS.d/next/Build/2019-12-16-17-50-42.bpo-31046.XA-Qfr.rst

Index: Python-3.12.4/Doc/library/ensurepip.rst
Index: Python-3.12.10/Doc/library/ensurepip.rst
===================================================================
--- Python-3.12.4.orig/Doc/library/ensurepip.rst
--- Python-3.12.10.orig/Doc/library/ensurepip.rst 2025-04-08 13:35:47.000000000 +0200
+++ Python-3.12.4/Doc/library/ensurepip.rst
+++ Python-3.12.10/Doc/library/ensurepip.rst 2025-04-11 21:16:06.140273604 +0200
@@ -59,8 +59,9 @@ is at least as recent as the one availab
@@ -61,7 +61,11 @@
By default, ``pip`` is installed into the current virtual environment
(if one is active) or into the system site packages (if there is no
active virtual environment). The installation location can be controlled
-through two additional command line options:
+through some additional command line options:
+
+.. option:: --prefix <dir>
+
+ Installs ``pip`` using the given directory prefix.

+* ``--prefix <dir>``: Installs ``pip`` using the given directory prefix.
.. option:: --root <dir>
* :samp:`--root {dir}`: Installs ``pip`` relative to the given root directory
rather than the root of the currently active virtual environment (if any)
@@ -102,7 +106,7 @@
or the default root for the current Python installation.
@@ -92,7 +93,7 @@ Module API
Returns a string specifying the available version of pip that will be
installed when bootstrapping an environment.

@@ -37,7 +39,7 @@ Index: Python-3.12.4/Doc/library/ensurepip.rst
altinstall=False, default_pip=False, \
verbosity=0)

@@ -102,6 +103,8 @@ Module API
@@ -112,6 +116,8 @@
If *root* is ``None``, then installation uses the default install location
for the current environment.

@@ -46,7 +48,7 @@ Index: Python-3.12.4/Doc/library/ensurepip.rst
*upgrade* indicates whether or not to upgrade an existing installation
of an earlier version of ``pip`` to the available version.

@@ -122,6 +125,8 @@ Module API
@@ -132,6 +138,8 @@
*verbosity* controls the level of output to :data:`sys.stdout` from the
bootstrapping operation.

@@ -55,11 +57,11 @@ Index: Python-3.12.4/Doc/library/ensurepip.rst
.. audit-event:: ensurepip.bootstrap root ensurepip.bootstrap

.. note::
Index: Python-3.12.4/Lib/ensurepip/__init__.py
Index: Python-3.12.10/Lib/ensurepip/__init__.py
===================================================================
--- Python-3.12.4.orig/Lib/ensurepip/__init__.py
--- Python-3.12.10.orig/Lib/ensurepip/__init__.py 2025-04-11 21:04:42.789443156 +0200
+++ Python-3.12.4/Lib/ensurepip/__init__.py
+++ Python-3.12.10/Lib/ensurepip/__init__.py 2025-04-11 21:13:01.303399067 +0200
@@ -120,27 +120,27 @@ def _disable_pip_configuration_settings(
@@ -120,27 +120,27 @@
os.environ['PIP_CONFIG_FILE'] = os.devnull


@@ -92,7 +94,7 @@ Index: Python-3.12.4/Lib/ensurepip/__init__.py

Note that calling this function will alter both sys.path and os.environ.
"""
@@ -190,6 +190,8 @@ def _bootstrap(*, root=None, upgrade=Fal
@@ -190,6 +190,8 @@
args = ["install", "--no-cache-dir", "--no-index", "--find-links", tmpdir]
if root:
args += ["--root", root]
@@ -101,7 +103,7 @@ Index: Python-3.12.4/Lib/ensurepip/__init__.py
if upgrade:
args += ["--upgrade"]
if user:
@@ -265,6 +267,11 @@ def _main(argv=None):
@@ -265,6 +267,11 @@
help="Install everything relative to this alternate root directory.",
)
parser.add_argument(
@@ -113,7 +115,7 @@ Index: Python-3.12.4/Lib/ensurepip/__init__.py
"--altinstall",
action="store_true",
default=False,
@@ -283,6 +290,7 @@ def _main(argv=None):
@@ -283,6 +290,7 @@

return _bootstrap(
root=args.root,
@@ -121,11 +123,11 @@ Index: Python-3.12.4/Lib/ensurepip/__init__.py
upgrade=args.upgrade,
user=args.user,
verbosity=args.verbosity,
Index: Python-3.12.4/Lib/test/test_ensurepip.py
Index: Python-3.12.10/Lib/test/test_ensurepip.py
===================================================================
--- Python-3.12.4.orig/Lib/test/test_ensurepip.py
--- Python-3.12.10.orig/Lib/test/test_ensurepip.py 2025-04-11 21:04:44.274413027 +0200
+++ Python-3.12.4/Lib/test/test_ensurepip.py
+++ Python-3.12.10/Lib/test/test_ensurepip.py 2025-04-11 21:13:01.303691075 +0200
@@ -105,6 +105,17 @@ class TestBootstrap(EnsurepipMixin, unit
@@ -105,6 +105,17 @@
unittest.mock.ANY,
)

@@ -143,11 +145,11 @@ Index: Python-3.12.4/Lib/test/test_ensurepip.py
def test_bootstrapping_with_user(self):
ensurepip.bootstrap(user=True)

Index: Python-3.12.4/Makefile.pre.in
Index: Python-3.12.10/Makefile.pre.in
===================================================================
--- Python-3.12.4.orig/Makefile.pre.in
--- Python-3.12.10.orig/Makefile.pre.in 2025-04-11 21:04:58.388346212 +0200
+++ Python-3.12.4/Makefile.pre.in
+++ Python-3.12.10/Makefile.pre.in 2025-04-11 21:13:01.304095180 +0200
@@ -1914,7 +1914,7 @@ install: @FRAMEWORKINSTALLFIRST@ commoni
@@ -1914,7 +1914,7 @@
install|*) ensurepip="" ;; \
esac; \
$(RUNSHARED) $(PYTHON_FOR_BUILD) -m ensurepip \
@@ -156,7 +158,7 @@ Index: Python-3.12.4/Makefile.pre.in
fi

.PHONY: altinstall
@@ -1925,7 +1925,7 @@ altinstall: commoninstall
@@ -1925,7 +1925,7 @@
install|*) ensurepip="--altinstall" ;; \
esac; \
$(RUNSHARED) $(PYTHON_FOR_BUILD) -m ensurepip \
@@ -165,9 +167,9 @@ Index: Python-3.12.4/Makefile.pre.in
fi

.PHONY: commoninstall
Index: Python-3.12.4/Misc/NEWS.d/next/Build/2019-12-16-17-50-42.bpo-31046.XA-Qfr.rst
Index: Python-3.12.10/Misc/NEWS.d/next/Build/2019-12-16-17-50-42.bpo-31046.XA-Qfr.rst
===================================================================
--- /dev/null
--- /dev/null 1970-01-01 00:00:00.000000000 +0000
+++ Python-3.12.4/Misc/NEWS.d/next/Build/2019-12-16-17-50-42.bpo-31046.XA-Qfr.rst
+++ Python-3.12.10/Misc/NEWS.d/next/Build/2019-12-16-17-50-42.bpo-31046.XA-Qfr.rst 2025-04-11 21:13:01.304672632 +0200
@@ -0,0 +1 @@
+A directory prefix can now be specified when using :mod:`ensurepip`.
doc-py38-to-py36.patch (new file, 780 lines)
@ -0,0 +1,780 @@
|
|||||||
|
---
|
||||||
|
Doc/Makefile | 8 +--
|
||||||
|
Doc/conf.py | 16 ++++++-
|
||||||
|
Doc/tools/check-warnings.py | 5 +-
|
||||||
|
Doc/tools/extensions/audit_events.py | 54 +++++++++++++-------------
|
||||||
|
Doc/tools/extensions/availability.py | 15 +++----
|
||||||
|
Doc/tools/extensions/c_annotations.py | 45 +++++++++++++--------
|
||||||
|
Doc/tools/extensions/changes.py | 8 +--
|
||||||
|
Doc/tools/extensions/glossary_search.py | 10 +---
|
||||||
|
Doc/tools/extensions/implementation_detail.py | 22 +++-------
|
||||||
|
Doc/tools/extensions/issue_role.py | 16 ++-----
|
||||||
|
Doc/tools/extensions/misc_news.py | 14 ++----
|
||||||
|
Doc/tools/extensions/patchlevel.py | 9 ++--
|
||||||
|
Doc/tools/extensions/pydoc_topics.py | 22 +++++-----
|
||||||
|
13 files changed, 126 insertions(+), 118 deletions(-)
|
||||||
|
|
||||||
|
Index: Python-3.12.10/Doc/Makefile
|
||||||
|
===================================================================
|
||||||
|
--- Python-3.12.10.orig/Doc/Makefile 2025-04-29 22:11:50.013198738 +0200
|
||||||
|
+++ Python-3.12.10/Doc/Makefile 2025-04-29 22:11:52.047098026 +0200
|
||||||
|
@@ -14,15 +14,15 @@
|
||||||
|
SOURCES =
|
||||||
|
DISTVERSION = $(shell $(PYTHON) tools/extensions/patchlevel.py)
|
||||||
|
REQUIREMENTS = requirements.txt
|
||||||
|
-SPHINXERRORHANDLING = --fail-on-warning
|
||||||
|
+SPHINXERRORHANDLING = -W
|
||||||
|
|
||||||
|
# Internal variables.
|
||||||
|
PAPEROPT_a4 = --define latex_elements.papersize=a4paper
|
||||||
|
PAPEROPT_letter = --define latex_elements.papersize=letterpaper
|
||||||
|
|
||||||
|
-ALLSPHINXOPTS = --builder $(BUILDER) \
|
||||||
|
- --doctree-dir build/doctrees \
|
||||||
|
- --jobs $(JOBS) \
|
||||||
|
+ALLSPHINXOPTS = -b $(BUILDER) \
|
||||||
|
+ -d build/doctrees \
|
||||||
|
+ -j $(JOBS) \
|
||||||
|
$(PAPEROPT_$(PAPER)) \
|
||||||
|
$(SPHINXOPTS) $(SPHINXERRORHANDLING) \
|
||||||
|
. build/$(BUILDER) $(SOURCES)
|
||||||
|
Index: Python-3.12.10/Doc/conf.py
|
||||||
|
===================================================================
|
||||||
|
--- Python-3.12.10.orig/Doc/conf.py 2025-04-29 22:11:46.161835452 +0200
|
||||||
|
+++ Python-3.12.10/Doc/conf.py 2025-04-29 22:11:52.047459667 +0200
|
||||||
|
@@ -11,6 +11,8 @@
|
||||||
|
from importlib import import_module
|
||||||
|
from importlib.util import find_spec
|
||||||
|
|
||||||
|
+from sphinx import version_info
|
||||||
|
+
|
||||||
|
# Make our custom extensions available to Sphinx
|
||||||
|
sys.path.append(os.path.abspath('tools/extensions'))
|
||||||
|
sys.path.append(os.path.abspath('includes'))
|
||||||
|
@@ -87,7 +89,7 @@
|
||||||
|
|
||||||
|
# Minimum version of sphinx required
|
||||||
|
# Keep this version in sync with ``Doc/requirements.txt``.
|
||||||
|
-needs_sphinx = '8.2.0'
|
||||||
|
+needs_sphinx = '4.2.0'
|
||||||
|
|
||||||
|
# Create table of contents entries for domain objects (e.g. functions, classes,
|
||||||
|
# attributes, etc.). Default is True.
|
||||||
|
@@ -342,7 +344,7 @@
|
||||||
|
# (See .readthedocs.yml and https://docs.readthedocs.io/en/stable/reference/environment-variables.html)
|
||||||
|
is_deployment_preview = os.getenv("READTHEDOCS_VERSION_TYPE") == "external"
|
||||||
|
repository_url = os.getenv("READTHEDOCS_GIT_CLONE_URL", "")
|
||||||
|
-repository_url = repository_url.removesuffix(".git")
|
||||||
|
+repository_url = repository_url[:-len(".git")]
|
||||||
|
html_context = {
|
||||||
|
"is_deployment_preview": is_deployment_preview,
|
||||||
|
"repository_url": repository_url or None,
|
||||||
|
@@ -588,6 +590,16 @@
|
||||||
|
}
|
||||||
|
extlinks_detect_hardcoded_links = True
|
||||||
|
|
||||||
|
+if version_info[:2] < (8, 1):
|
||||||
|
+ # Sphinx 8.1 has in-built CVE and CWE roles.
|
||||||
|
+ extlinks.update({
|
||||||
|
+ "cve": (
|
||||||
|
+ "https://www.cve.org/CVERecord?id=CVE-%s",
|
||||||
|
+ "CVE-%s",
|
||||||
|
+ ),
|
||||||
|
+ "cwe": ("https://cwe.mitre.org/data/definitions/%s.html", "CWE-%s"),
|
||||||
|
+ })
|
||||||
|
+
|
||||||
|
# Options for c_annotations extension
|
||||||
|
# -----------------------------------
|
||||||
|
|
||||||
|
Index: Python-3.12.10/Doc/tools/check-warnings.py
|
||||||
|
===================================================================
|
||||||
|
--- Python-3.12.10.orig/Doc/tools/check-warnings.py 2025-04-08 13:35:47.000000000 +0200
|
||||||
|
+++ Python-3.12.10/Doc/tools/check-warnings.py 2025-04-29 22:11:52.047704324 +0200
|
||||||
|
@@ -228,7 +228,8 @@
|
||||||
|
print(filename)
|
||||||
|
for warning in warnings:
|
||||||
|
if filename in warning:
|
||||||
|
- if match := WARNING_PATTERN.fullmatch(warning):
|
||||||
|
+ match = WARNING_PATTERN.fullmatch(warning)
|
||||||
|
+ if match:
|
||||||
|
print(" {line}: {msg}".format_map(match))
|
||||||
|
return -1
|
||||||
|
return 0
|
||||||
|
@@ -316,7 +317,7 @@
|
||||||
|
|
||||||
|
cwd = str(Path.cwd()) + os.path.sep
|
||||||
|
files_with_nits = {
|
||||||
|
- warning.removeprefix(cwd).split(":")[0]
|
||||||
|
+ (warning[len(cwd):].split(":")[0] if warning.startswith(cwd) else warning.split(":")[0])
|
||||||
|
for warning in warnings
|
||||||
|
if "Doc/" in warning
|
||||||
|
}
|
||||||
|
Index: Python-3.12.10/Doc/tools/extensions/audit_events.py
|
||||||
|
===================================================================
|
||||||
|
--- Python-3.12.10.orig/Doc/tools/extensions/audit_events.py 2025-04-08 13:35:47.000000000 +0200
|
||||||
|
+++ Python-3.12.10/Doc/tools/extensions/audit_events.py 2025-04-29 22:11:52.047967558 +0200
|
||||||
|
@@ -1,9 +1,6 @@
|
||||||
|
"""Support for documenting audit events."""
|
||||||
|
|
||||||
|
-from __future__ import annotations
|
||||||
|
-
|
||||||
|
import re
|
||||||
|
-from typing import TYPE_CHECKING
|
||||||
|
|
||||||
|
from docutils import nodes
|
||||||
|
from sphinx.errors import NoUri
|
||||||
|
@@ -12,12 +9,11 @@
|
||||||
|
from sphinx.util import logging
|
||||||
|
from sphinx.util.docutils import SphinxDirective
|
||||||
|
|
||||||
|
-if TYPE_CHECKING:
|
||||||
|
- from collections.abc import Iterator
|
||||||
|
+from typing import Any, List, Tuple
|
||||||
|
|
||||||
|
- from sphinx.application import Sphinx
|
||||||
|
- from sphinx.builders import Builder
|
||||||
|
- from sphinx.environment import BuildEnvironment
|
||||||
|
+from sphinx.application import Sphinx
|
||||||
|
+from sphinx.builders import Builder
|
||||||
|
+from sphinx.environment import BuildEnvironment
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
@@ -32,16 +28,16 @@
|
||||||
|
|
||||||
|
class AuditEvents:
|
||||||
|
def __init__(self) -> None:
|
||||||
|
- self.events: dict[str, list[str]] = {}
|
||||||
|
- self.sources: dict[str, list[tuple[str, str]]] = {}
|
||||||
|
+ self.events: dict[str, List[str]] = {}
|
||||||
|
+ self.sources: dict[str, List[Tuple[str, str]]] = {}
|
||||||
|
|
||||||
|
- def __iter__(self) -> Iterator[tuple[str, list[str], tuple[str, str]]]:
|
||||||
|
+ def __iter__(self) -> Any:
|
||||||
|
for name, args in self.events.items():
|
||||||
|
for source in self.sources[name]:
|
||||||
|
yield name, args, source
|
||||||
|
|
||||||
|
def add_event(
|
||||||
|
- self, name, args: list[str], source: tuple[str, str]
|
||||||
|
+ self, name, args: List[str], source: Tuple[str, str]
|
||||||
|
) -> None:
|
||||||
|
if name in self.events:
|
||||||
|
self._check_args_match(name, args)
|
||||||
|
@@ -49,7 +45,7 @@
|
||||||
|
self.events[name] = args
|
||||||
|
self.sources.setdefault(name, []).append(source)
|
||||||
|
|
||||||
|
- def _check_args_match(self, name: str, args: list[str]) -> None:
|
||||||
|
+ def _check_args_match(self, name: str, args: List[str]) -> None:
|
||||||
|
current_args = self.events[name]
|
||||||
|
msg = (
|
||||||
|
f"Mismatched arguments for audit-event {name}: "
|
||||||
|
@@ -60,7 +56,7 @@
|
||||||
|
if len(current_args) != len(args):
|
||||||
|
logger.warning(msg)
|
||||||
|
return
|
||||||
|
- for a1, a2 in zip(current_args, args, strict=False):
|
||||||
|
+ for a1, a2 in zip(current_args, args):
|
||||||
|
if a1 == a2:
|
||||||
|
continue
|
||||||
|
if any(a1 in s and a2 in s for s in _SYNONYMS):
|
||||||
|
@@ -73,7 +69,7 @@
|
||||||
|
name_clean = re.sub(r"\W", "_", name)
|
||||||
|
return f"audit_event_{name_clean}_{source_count}"
|
||||||
|
|
||||||
|
- def rows(self) -> Iterator[tuple[str, list[str], list[tuple[str, str]]]]:
|
||||||
|
+ def rows(self) -> Any:
|
||||||
|
for name in sorted(self.events.keys()):
|
||||||
|
yield name, self.events[name], self.sources[name]
|
||||||
|
|
||||||
|
@@ -97,7 +93,7 @@
|
||||||
|
def audit_events_merge(
|
||||||
|
app: Sphinx,
|
||||||
|
env: BuildEnvironment,
|
||||||
|
- docnames: list[str],
|
||||||
|
+ docnames: List[str],
|
||||||
|
other: BuildEnvironment,
|
||||||
|
) -> None:
|
||||||
|
"""In Sphinx parallel builds, this merges audit_events from subprocesses."""
|
||||||
|
@@ -126,14 +122,16 @@
|
||||||
|
),
|
||||||
|
]
|
||||||
|
|
||||||
|
- def run(self) -> list[nodes.paragraph]:
|
||||||
|
+ def run(self) -> List[nodes.paragraph]:
|
||||||
|
+ def _no_walrus_op(args):
|
||||||
|
+ for arg in args.strip("'\"").split(","):
|
||||||
|
+ aarg = arg.strip()
|
||||||
|
+ if aarg:
|
||||||
|
+ yield aarg
|
||||||
|
+
|
||||||
|
name = self.arguments[0]
|
||||||
|
if len(self.arguments) >= 2 and self.arguments[1]:
|
||||||
|
- args = [
|
||||||
|
- arg
|
||||||
|
- for argument in self.arguments[1].strip("'\"").split(",")
|
||||||
|
- if (arg := argument.strip())
|
||||||
|
- ]
|
||||||
|
+ args = list(_no_walrus_op(self.arguments[1]))
|
||||||
|
else:
|
||||||
|
args = []
|
||||||
|
ids = []
|
||||||
|
@@ -169,7 +167,7 @@
|
||||||
|
|
||||||
|
|
||||||
|
class AuditEventListDirective(SphinxDirective):
|
||||||
|
- def run(self) -> list[audit_event_list]:
|
||||||
|
+ def run(self) -> List[audit_event_list]:
|
||||||
|
return [audit_event_list()]
|
||||||
|
|
||||||
|
|
||||||
|
@@ -181,7 +179,11 @@
|
||||||
|
return
|
||||||
|
|
||||||
|
table = self._make_table(self.app.builder, self.env.docname)
|
||||||
|
- for node in self.document.findall(audit_event_list):
|
||||||
|
+ try:
|
||||||
|
+ findall = self.document.findall
|
||||||
|
+ except AttributeError:
|
||||||
|
+ findall = self.document.traverse
|
||||||
|
+ for node in findall(audit_event_list):
|
||||||
|
node.replace_self(table)
|
||||||
|
|
||||||
|
def _make_table(self, builder: Builder, docname: str) -> nodes.table:
|
||||||
|
@@ -217,8 +219,8 @@
|
||||||
|
builder: Builder,
|
||||||
|
docname: str,
|
||||||
|
name: str,
|
||||||
|
- args: list[str],
|
||||||
|
- sources: list[tuple[str, str]],
|
||||||
|
+ args: List[str],
|
||||||
|
+ sources: List[Tuple[str, str]],
|
||||||
|
) -> nodes.row:
|
||||||
|
row = nodes.row()
|
||||||
|
name_node = nodes.paragraph("", nodes.Text(name))
|
||||||
|
Index: Python-3.12.10/Doc/tools/extensions/availability.py
|
||||||
|
===================================================================
|
||||||
|
--- Python-3.12.10.orig/Doc/tools/extensions/availability.py 2025-04-08 13:35:47.000000000 +0200
|
||||||
|
+++ Python-3.12.10/Doc/tools/extensions/availability.py 2025-04-29 22:11:52.048206976 +0200
|
||||||
|
@@ -1,8 +1,6 @@
|
||||||
|
"""Support for documenting platform availability"""
|
||||||
|
|
||||||
|
-from __future__ import annotations
|
||||||
|
-
|
||||||
|
-from typing import TYPE_CHECKING
|
||||||
|
+from typing import Dict, List, TYPE_CHECKING, Union
|
||||||
|
|
||||||
|
from docutils import nodes
|
||||||
|
from sphinx import addnodes
|
||||||
|
@@ -53,7 +51,7 @@
|
||||||
|
optional_arguments = 0
|
||||||
|
final_argument_whitespace = True
|
||||||
|
|
||||||
|
- def run(self) -> list[nodes.container]:
|
||||||
|
+ def run(self) -> List[nodes.container]:
|
||||||
|
title = sphinx_gettext("Availability")
|
||||||
|
refnode = addnodes.pending_xref(
|
||||||
|
title,
|
||||||
|
@@ -77,7 +75,7 @@
|
||||||
|
|
||||||
|
return [cnode]
|
||||||
|
|
||||||
|
- def parse_platforms(self) -> dict[str, str | bool]:
|
||||||
|
+ def parse_platforms(self) -> Dict[str, Union[str, bool]]:
|
||||||
|
"""Parse platform information from arguments
|
||||||
|
|
||||||
|
Arguments is a comma-separated string of platforms. A platform may
|
||||||
|
@@ -96,12 +94,13 @@
|
||||||
|
platform, _, version = arg.partition(" >= ")
|
||||||
|
if platform.startswith("not "):
|
||||||
|
version = False
|
||||||
|
- platform = platform.removeprefix("not ")
|
||||||
|
+ platform = platform[len("not "):]
|
||||||
|
elif not version:
|
||||||
|
version = True
|
||||||
|
platforms[platform] = version
|
||||||
|
|
||||||
|
- if unknown := set(platforms).difference(KNOWN_PLATFORMS):
|
||||||
|
+ unknown = set(platforms).difference(KNOWN_PLATFORMS)
|
||||||
|
+ if unknown:
|
||||||
|
logger.warning(
|
||||||
|
"Unknown platform%s or syntax '%s' in '.. availability:: %s', "
|
||||||
|
"see %s:KNOWN_PLATFORMS for a set of known platforms.",
|
||||||
|
@@ -114,7 +113,7 @@
|
||||||
|
return platforms
|
||||||
|
|
||||||
|
|
||||||
|
-def setup(app: Sphinx) -> ExtensionMetadata:
|
||||||
|
+def setup(app):
|
||||||
|
app.add_directive("availability", Availability)
|
||||||
|
|
||||||
|
return {
|
||||||
|
Index: Python-3.12.10/Doc/tools/extensions/c_annotations.py
|
||||||
|
===================================================================
|
||||||
|
--- Python-3.12.10.orig/Doc/tools/extensions/c_annotations.py 2025-04-29 22:11:52.033400629 +0200
|
||||||
|
+++ Python-3.12.10/Doc/tools/extensions/c_annotations.py 2025-04-29 22:11:52.048411194 +0200
|
||||||
|
@@ -9,22 +9,18 @@
|
||||||
|
* Set ``stable_abi_file`` to the path to stable ABI list.
|
||||||
|
"""
|
||||||
|
|
||||||
|
-from __future__ import annotations
|
||||||
|
-
|
||||||
|
import csv
|
||||||
|
import dataclasses
|
||||||
|
from pathlib import Path
|
||||||
|
-from typing import TYPE_CHECKING
|
||||||
|
+from typing import Any, Dict, List, TYPE_CHECKING, Union
|
||||||
|
|
||||||
|
from docutils import nodes
|
||||||
|
from docutils.statemachine import StringList
|
||||||
|
-from sphinx import addnodes
|
||||||
|
+from sphinx import addnodes, version_info
|
||||||
|
from sphinx.locale import _ as sphinx_gettext
|
||||||
|
from sphinx.util.docutils import SphinxDirective
|
||||||
|
|
||||||
|
-if TYPE_CHECKING:
|
||||||
|
- from sphinx.application import Sphinx
|
||||||
|
- from sphinx.util.typing import ExtensionMetadata
|
||||||
|
+from sphinx.application import Sphinx
|
||||||
|
|
||||||
|
ROLE_TO_OBJECT_TYPE = {
|
||||||
|
"func": "function",
|
||||||
|
@@ -35,20 +31,20 @@
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
-@dataclasses.dataclass(slots=True)
|
||||||
|
+@dataclasses.dataclass()
|
||||||
|
class RefCountEntry:
|
||||||
|
# Name of the function.
|
||||||
|
name: str
|
||||||
|
# List of (argument name, type, refcount effect) tuples.
|
||||||
|
# (Currently not used. If it was, a dataclass might work better.)
|
||||||
|
- args: list = dataclasses.field(default_factory=list)
|
||||||
|
+ args: List = dataclasses.field(default_factory=list)
|
||||||
|
# Return type of the function.
|
||||||
|
result_type: str = ""
|
||||||
|
# Reference count effect for the return value.
|
||||||
|
- result_refs: int | None = None
|
||||||
|
+ result_refs: Union[int, None] = None
|
||||||
|
|
||||||
|
|
||||||
|
-@dataclasses.dataclass(frozen=True, slots=True)
|
||||||
|
+@dataclasses.dataclass(frozen=True)
|
||||||
|
class StableABIEntry:
|
||||||
|
# Role of the object.
|
||||||
|
# Source: Each [item_kind] in stable_abi.toml is mapped to a C Domain role.
|
||||||
|
@@ -67,7 +63,7 @@
|
||||||
|
struct_abi_kind: str
|
||||||
|
|
||||||
|
|
||||||
|
-def read_refcount_data(refcount_filename: Path) -> dict[str, RefCountEntry]:
|
||||||
|
+def read_refcount_data(refcount_filename: Path) -> Dict[str, RefCountEntry]:
|
||||||
|
refcount_data = {}
|
||||||
|
refcounts = refcount_filename.read_text(encoding="utf8")
|
||||||
|
for line in refcounts.splitlines():
|
||||||
|
@@ -103,7 +99,7 @@
|
||||||
|
return refcount_data
|
||||||
|
|
||||||
|
|
||||||
|
-def read_stable_abi_data(stable_abi_file: Path) -> dict[str, StableABIEntry]:
|
||||||
|
+def read_stable_abi_data(stable_abi_file: Path) -> Dict[str, StableABIEntry]:
|
||||||
|
stable_abi_data = {}
|
||||||
|
with open(stable_abi_file, encoding="utf8") as fp:
|
||||||
|
for record in csv.DictReader(fp):
|
||||||
|
@@ -127,11 +123,14 @@
|
||||||
|
continue
|
||||||
|
if not par[0].get("ids", None):
|
||||||
|
continue
|
||||||
|
- name = par[0]["ids"][0].removeprefix("c.")
|
||||||
|
+ name = par[0]["ids"][0]
|
||||||
|
+ if name.startswith("c."):
|
||||||
|
+ name = name[len("c."):]
|
||||||
|
objtype = par["objtype"]
|
||||||
|
|
||||||
|
# Stable ABI annotation.
|
||||||
|
- if record := stable_abi_data.get(name):
|
||||||
|
+ record = stable_abi_data.get(name)
|
||||||
|
+ if record:
|
||||||
|
if ROLE_TO_OBJECT_TYPE[record.role] != objtype:
|
||||||
|
msg = (
|
||||||
|
f"Object type mismatch in limited API annotation for {name}: "
|
||||||
|
@@ -238,7 +237,7 @@
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
-def _return_value_annotation(result_refs: int | None) -> nodes.emphasis:
|
||||||
|
+def _return_value_annotation(result_refs: Union[int, None]) -> nodes.emphasis:
|
||||||
|
classes = ["refcount"]
|
||||||
|
if result_refs is None:
|
||||||
|
rc = sphinx_gettext("Return value: Always NULL.")
|
||||||
|
@@ -258,7 +257,7 @@
|
||||||
|
optional_arguments = 0
|
||||||
|
final_argument_whitespace = True
|
||||||
|
|
||||||
|
- def run(self) -> list[nodes.Node]:
|
||||||
|
+ def run(self) -> List[nodes.Node]:
|
||||||
|
state = self.env.domaindata["c_annotations"]
|
||||||
|
content = [
|
||||||
|
f"* :c:{record.role}:`{record.name}`"
|
||||||
|
@@ -281,13 +280,23 @@
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
-def setup(app: Sphinx) -> ExtensionMetadata:
|
||||||
|
+def setup(app: Sphinx) -> Any:
|
||||||
|
app.add_config_value("refcount_file", "", "env", types={str})
|
||||||
|
app.add_config_value("stable_abi_file", "", "env", types={str})
|
||||||
|
app.add_directive("limited-api-list", LimitedAPIList)
|
||||||
|
app.connect("builder-inited", init_annotations)
|
||||||
|
app.connect("doctree-read", add_annotations)
|
||||||
|
|
||||||
|
+ if version_info[:2] < (7, 2):
|
||||||
|
+ from docutils.parsers.rst import directives
|
||||||
|
+ from sphinx.domains.c import CObject
|
||||||
|
+
|
||||||
|
+ # monkey-patch C object...
|
||||||
|
+ CObject.option_spec.update({
|
||||||
|
+ "no-index-entry": directives.flag,
|
||||||
|
+ "no-contents-entry": directives.flag,
|
||||||
|
+ })
|
||||||
|
+
|
||||||
|
return {
|
||||||
|
"version": "1.0",
|
||||||
|
"parallel_read_safe": True,
|
||||||
|
Index: Python-3.12.10/Doc/tools/extensions/changes.py
|
||||||
|
===================================================================
|
||||||
|
--- Python-3.12.10.orig/Doc/tools/extensions/changes.py 2025-04-08 13:35:47.000000000 +0200
|
||||||
|
+++ Python-3.12.10/Doc/tools/extensions/changes.py 2025-04-29 22:11:52.048619113 +0200
|
||||||
|
@@ -1,7 +1,5 @@
|
||||||
|
"""Support for documenting version of changes, additions, deprecations."""
|
||||||
|
|
||||||
|
-from __future__ import annotations
|
||||||
|
-
|
||||||
|
from typing import TYPE_CHECKING
|
||||||
|
|
||||||
|
from sphinx.domains.changeset import (
|
||||||
|
@@ -25,7 +23,7 @@
|
||||||
|
|
||||||
|
|
||||||
|
class PyVersionChange(VersionChange):
|
||||||
|
- def run(self) -> list[Node]:
|
||||||
|
+ def run(self) -> "list[Node]":
|
||||||
|
# Replace the 'next' special token with the current development version
|
||||||
|
self.arguments[0] = expand_version_arg(
|
||||||
|
self.arguments[0], self.config.release
|
||||||
|
@@ -43,7 +41,7 @@
|
||||||
|
"Deprecated since version %s, removed in version %s"
|
||||||
|
)
|
||||||
|
|
||||||
|
- def run(self) -> list[Node]:
|
||||||
|
+ def run(self) -> "list[Node]":
|
||||||
|
# Replace the first two arguments (deprecated version and removed version)
|
||||||
|
# with a single tuple of both versions.
|
||||||
|
version_deprecated = expand_version_arg(
|
||||||
|
@@ -73,7 +71,7 @@
|
||||||
|
versionlabel_classes[self.name] = ""
|
||||||
|
|
||||||
|
|
||||||
|
-def setup(app: Sphinx) -> ExtensionMetadata:
|
||||||
|
+def setup(app: "Sphinx") -> "ExtensionMetadata":
|
||||||
|
# Override Sphinx's directives with support for 'next'
|
||||||
|
app.add_directive("versionadded", PyVersionChange, override=True)
|
||||||
|
app.add_directive("versionchanged", PyVersionChange, override=True)
|
||||||
|
Index: Python-3.12.10/Doc/tools/extensions/glossary_search.py
|
||||||
|
===================================================================
|
||||||
|
--- Python-3.12.10.orig/Doc/tools/extensions/glossary_search.py 2025-04-29 22:11:52.033722879 +0200
|
||||||
|
+++ Python-3.12.10/Doc/tools/extensions/glossary_search.py 2025-04-29 22:11:52.048797629 +0200
|
||||||
|
@@ -1,18 +1,14 @@
|
||||||
|
"""Feature search results for glossary items prominently."""
|
||||||
|
|
||||||
|
-from __future__ import annotations
|
||||||
|
-
|
||||||
|
import json
|
||||||
|
from pathlib import Path
|
||||||
|
-from typing import TYPE_CHECKING
|
||||||
|
+from typing import Any, TYPE_CHECKING
|
||||||
|
|
||||||
|
from docutils import nodes
|
||||||
|
from sphinx.addnodes import glossary
|
||||||
|
from sphinx.util import logging
|
||||||
|
|
||||||
|
-if TYPE_CHECKING:
|
||||||
|
- from sphinx.application import Sphinx
|
||||||
|
- from sphinx.util.typing import ExtensionMetadata
|
||||||
|
+from sphinx.application import Sphinx
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
@@ -60,7 +56,7 @@
|
||||||
|
dest.write_text(json.dumps(app.env.glossary_terms), encoding='utf-8')
|
||||||
|
|
||||||
|
|
||||||
|
-def setup(app: Sphinx) -> ExtensionMetadata:
|
||||||
|
+def setup(app: Sphinx) -> Any:
|
||||||
|
app.connect('doctree-resolved', process_glossary_nodes)
|
||||||
|
app.connect('build-finished', write_glossary_json)
|
||||||
|
|
||||||
|
Index: Python-3.12.10/Doc/tools/extensions/implementation_detail.py
|
||||||
|
===================================================================
|
||||||
|
--- Python-3.12.10.orig/Doc/tools/extensions/implementation_detail.py 2025-04-08 13:35:47.000000000 +0200
|
||||||
|
+++ Python-3.12.10/Doc/tools/extensions/implementation_detail.py 2025-04-29 22:48:23.397548211 +0200
|
||||||
|
@@ -1,17 +1,10 @@
|
||||||
|
"""Support for marking up implementation details."""
|
||||||
|
|
||||||
|
-from __future__ import annotations
|
||||||
|
-
|
||||||
|
-from typing import TYPE_CHECKING
|
||||||
|
-
|
||||||
|
from docutils import nodes
|
||||||
|
from sphinx.locale import _ as sphinx_gettext
|
||||||
|
from sphinx.util.docutils import SphinxDirective
|
||||||
|
|
||||||
|
-if TYPE_CHECKING:
|
||||||
|
- from sphinx.application import Sphinx
|
||||||
|
- from sphinx.util.typing import ExtensionMetadata
|
||||||
|
-
|
||||||
|
+from sphinx.application import Sphinx
|
||||||
|
|
||||||
|
class ImplementationDetail(SphinxDirective):
|
||||||
|
has_content = True
|
||||||
|
@@ -21,23 +14,24 @@
|
||||||
|
label_text = sphinx_gettext("CPython implementation detail:")
|
||||||
|
|
||||||
|
def run(self):
|
||||||
|
- self.assert_has_content()
|
||||||
|
- content_nodes = self.parse_content_to_nodes()
|
||||||
|
+ container_node = nodes.container()
|
||||||
|
+ container_node.document = self.state.document # Ensure node has document context
|
||||||
|
+ self.state.nested_parse(self.content, self.content_offset, container_node)
|
||||||
|
+ parsed_nodes = container_node.children
|
||||||
|
|
||||||
|
# insert our prefix at the start of the first paragraph
|
||||||
|
- first_node = content_nodes[0]
|
||||||
|
+ first_node = parsed_nodes[0]
|
||||||
|
first_node[:0] = [
|
||||||
|
nodes.strong(self.label_text, self.label_text),
|
||||||
|
nodes.Text(" "),
|
||||||
|
]
|
||||||
|
|
||||||
|
- # create a new compound container node
|
||||||
|
- cnode = nodes.compound("", *content_nodes, classes=["impl-detail"])
|
||||||
|
+ cnode = nodes.compound("", *parsed_nodes, classes=["impl-detail"])
|
||||||
|
self.set_source_info(cnode)
|
||||||
|
return [cnode]
|
||||||
|
|
||||||
|
|
||||||
|
-def setup(app: Sphinx) -> ExtensionMetadata:
|
||||||
|
+def setup(app: Sphinx):
|
||||||
|
app.add_directive("impl-detail", ImplementationDetail)
|
||||||
|
|
||||||
|
return {
|
||||||
|
Index: Python-3.12.10/Doc/tools/extensions/issue_role.py
|
||||||
|
===================================================================
|
||||||
|
--- Python-3.12.10.orig/Doc/tools/extensions/issue_role.py 2025-04-08 13:35:47.000000000 +0200
|
||||||
|
+++ Python-3.12.10/Doc/tools/extensions/issue_role.py 2025-04-29 22:21:55.278961032 +0200
|
||||||
|
@@ -1,22 +1,18 @@
|
||||||
|
"""Support for referencing issues in the tracker."""
|
||||||
|
|
||||||
|
-from __future__ import annotations
|
||||||
|
-
|
||||||
|
-from typing import TYPE_CHECKING
|
||||||
|
+from typing import TYPE_CHECKING, List, Tuple
|
||||||
|
|
||||||
|
from docutils import nodes
|
||||||
|
from sphinx.util.docutils import SphinxRole
|
||||||
|
|
||||||
|
-if TYPE_CHECKING:
|
||||||
|
- from docutils.nodes import Element
|
||||||
|
- from sphinx.application import Sphinx
|
||||||
|
- from sphinx.util.typing import ExtensionMetadata
|
||||||
|
+from docutils.nodes import Element
|
||||||
|
+from sphinx.application import Sphinx
|
||||||
|
|
||||||
|
|
||||||
|
class BPOIssue(SphinxRole):
|
||||||
|
ISSUE_URI = "https://bugs.python.org/issue?@action=redirect&bpo={0}"
|
||||||
|
|
||||||
|
- def run(self) -> tuple[list[Element], list[nodes.system_message]]:
|
||||||
|
+ def run(self) -> Tuple[List[Element], List[nodes.system_message]]:
|
||||||
|
issue = self.text
|
||||||
|
|
||||||
|
# sanity check: there are no bpo issues within these two values
|
||||||
|
@@ -38,7 +34,7 @@
|
||||||
|
class GitHubIssue(SphinxRole):
|
||||||
|
ISSUE_URI = "https://github.com/python/cpython/issues/{0}"
|
||||||
|
|
||||||
|
- def run(self) -> tuple[list[Element], list[nodes.system_message]]:
|
||||||
|
+ def run(self) -> Tuple[List[Element], List[nodes.system_message]]:
|
||||||
|
issue = self.text
|
||||||
|
|
||||||
|
# sanity check: all GitHub issues have ID >= 32426
|
||||||
|
@@ -58,7 +54,7 @@
|
||||||
|
return [refnode], []
|
||||||
|
|
||||||
|
|
||||||
|
-def setup(app: Sphinx) -> ExtensionMetadata:
|
||||||
|
+def setup(app: Sphinx) -> "ExtensionMetadata":
|
||||||
|
app.add_role("issue", BPOIssue())
|
||||||
|
app.add_role("gh", GitHubIssue())
|
||||||
|
|
||||||
|
Index: Python-3.12.10/Doc/tools/extensions/misc_news.py
|
||||||
|
===================================================================
|
||||||
|
--- Python-3.12.10.orig/Doc/tools/extensions/misc_news.py 2025-04-08 13:35:47.000000000 +0200
|
||||||
|
+++ Python-3.12.10/Doc/tools/extensions/misc_news.py 2025-04-29 22:11:52.049046825 +0200
|
||||||
|
@@ -1,7 +1,5 @@
|
||||||
|
"""Support for including Misc/NEWS."""
|
||||||
|
|
||||||
|
-from __future__ import annotations
|
||||||
|
-
|
||||||
|
import re
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import TYPE_CHECKING
|
||||||
|
@@ -24,13 +22,13 @@
|
||||||
|
+++++++++++
|
||||||
|
"""
|
||||||
|
|
||||||
|
-bpo_issue_re: Final[re.Pattern[str]] = re.compile(
|
||||||
|
+bpo_issue_re: "Final[re.Pattern[str]]" = re.compile(
|
||||||
|
"(?:issue #|bpo-)([0-9]+)", re.ASCII
|
||||||
|
)
|
||||||
|
-gh_issue_re: Final[re.Pattern[str]] = re.compile(
|
||||||
|
+gh_issue_re: "Final[re.Pattern[str]]" = re.compile(
|
||||||
|
"gh-(?:issue-)?([0-9]+)", re.ASCII | re.IGNORECASE
|
||||||
|
)
|
||||||
|
-whatsnew_re: Final[re.Pattern[str]] = re.compile(
|
||||||
|
+whatsnew_re: "Final[re.Pattern[str]]" = re.compile(
|
||||||
|
r"^what's new in (.*?)\??$", re.ASCII | re.IGNORECASE | re.MULTILINE
|
||||||
|
)
|
||||||
|
|
||||||
|
@@ -42,7 +40,7 @@
|
||||||
|
final_argument_whitespace = False
|
||||||
|
option_spec = {}
|
||||||
|
|
||||||
|
- def run(self) -> list[Node]:
|
||||||
|
+ def run(self) -> "list[Node]":
|
||||||
|
# Get content of NEWS file
|
||||||
|
source, _ = self.get_source_info()
|
||||||
|
news_file = Path(source).resolve().parent / self.arguments[0]
|
||||||
|
@@ -54,7 +52,7 @@
|
||||||
|
return [nodes.strong(text, text)]
|
||||||
|
|
||||||
|
# remove first 3 lines as they are the main heading
|
||||||
|
- news_text = news_text.removeprefix(BLURB_HEADER)
|
||||||
|
+ news_text = news_text[len(BLURB_HEADER):] if news_text.startswith(BLURB_HEADER) else news_text
|
||||||
|
|
||||||
|
news_text = bpo_issue_re.sub(r":issue:`\1`", news_text)
|
||||||
|
# Fallback handling for GitHub issues
|
||||||
|
@@ -65,7 +63,7 @@
|
||||||
|
return []
|
||||||
|
|
||||||
|
|
||||||
|
-def setup(app: Sphinx) -> ExtensionMetadata:
|
||||||
|
+def setup(app: "Sphinx") -> "ExtensionMetadata":
|
||||||
|
app.add_directive("miscnews", MiscNews)
|
||||||
|
|
||||||
|
return {
|
||||||
|
Index: Python-3.12.10/Doc/tools/extensions/patchlevel.py
|
||||||
|
===================================================================
|
||||||
|
--- Python-3.12.10.orig/Doc/tools/extensions/patchlevel.py 2025-04-08 13:35:47.000000000 +0200
|
||||||
|
+++ Python-3.12.10/Doc/tools/extensions/patchlevel.py 2025-04-29 22:11:52.049253068 +0200
|
||||||
|
@@ -3,7 +3,7 @@
|
||||||
|
import re
|
||||||
|
import sys
|
||||||
|
from pathlib import Path
|
||||||
|
-from typing import Literal, NamedTuple
|
||||||
|
+from typing import NamedTuple, Tuple
|
||||||
|
|
||||||
|
CPYTHON_ROOT = Path(
|
||||||
|
__file__, # cpython/Doc/tools/extensions/patchlevel.py
|
||||||
|
@@ -26,7 +26,7 @@
|
||||||
|
major: int #: Major release number
|
||||||
|
minor: int #: Minor release number
|
||||||
|
micro: int #: Patch release number
|
||||||
|
- releaselevel: Literal["alpha", "beta", "candidate", "final"]
|
||||||
|
+ releaselevel: str
|
||||||
|
serial: int #: Serial release number
|
||||||
|
|
||||||
|
|
||||||
|
@@ -37,7 +37,8 @@
|
||||||
|
defines = {}
|
||||||
|
patchlevel_h = PATCHLEVEL_H.read_text(encoding="utf-8")
|
||||||
|
for line in patchlevel_h.splitlines():
|
||||||
|
- if (m := pat.match(line)) is not None:
|
||||||
|
+ m = pat.match(line)
|
||||||
|
+ if m is not None:
|
||||||
|
name, value = m.groups()
|
||||||
|
defines[name] = value
|
||||||
|
|
||||||
|
@@ -50,7 +51,7 @@
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
-def format_version_info(info: version_info) -> tuple[str, str]:
|
||||||
|
+def format_version_info(info: version_info) -> Tuple[str, str]:
|
||||||
|
version = f"{info.major}.{info.minor}"
|
||||||
|
release = f"{info.major}.{info.minor}.{info.micro}"
|
||||||
|
if info.releaselevel != "final":
|
||||||
|
Index: Python-3.12.10/Doc/tools/extensions/pydoc_topics.py
|
||||||
|
===================================================================
|
||||||
|
--- Python-3.12.10.orig/Doc/tools/extensions/pydoc_topics.py 2025-04-08 13:35:47.000000000 +0200
|
||||||
|
+++ Python-3.12.10/Doc/tools/extensions/pydoc_topics.py 2025-04-29 22:33:59.916893510 +0200
|
||||||
|
@@ -1,21 +1,23 @@
|
||||||
|
"""Support for building "topic help" for pydoc."""
|
||||||
|
|
||||||
|
-from __future__ import annotations
|
||||||
|
-
|
||||||
|
from time import asctime
|
||||||
|
-from typing import TYPE_CHECKING
|
||||||
|
+from typing import TYPE_CHECKING, Tuple
|
||||||
|
|
||||||
|
from sphinx.builders.text import TextBuilder
|
||||||
|
from sphinx.util import logging
|
||||||
|
-from sphinx.util.display import status_iterator
|
||||||
|
+try:
|
||||||
|
+ from sphinx.util.display import status_iterator
|
||||||
|
+except ModuleNotFoundError:
|
||||||
|
+ from sphinx.util import status_iterator
|
||||||
|
from sphinx.util.docutils import new_document
|
||||||
|
from sphinx.writers.text import TextTranslator
|
||||||
|
|
||||||
|
-if TYPE_CHECKING:
|
||||||
|
+try:
|
||||||
|
+ from typing import Sequence, Set
|
||||||
|
+except ModuleNotFoundError:
|
||||||
|
from collections.abc import Sequence, Set
|
||||||
|
|
||||||
|
- from sphinx.application import Sphinx
|
||||||
|
- from sphinx.util.typing import ExtensionMetadata
|
||||||
|
+from sphinx.application import Sphinx
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
@@ -161,7 +163,7 @@
|
||||||
|
self.outdir.joinpath("topics.py").write_text(topics, encoding="utf-8")
|
||||||
|
|
||||||
|
|
||||||
|
-def _display_labels(item: tuple[str, Sequence[tuple[str, str]]]) -> str:
|
||||||
|
+def _display_labels(item: Tuple[str, Sequence[Tuple[str, str]]]) -> str:
|
||||||
|
_docname, label_ids = item
|
||||||
|
labels = [name for name, _id in label_ids]
|
||||||
|
if len(labels) > 4:
|
||||||
|
@@ -169,7 +171,7 @@
|
||||||
|
return ", ".join(labels)
|
||||||
|
|
||||||
|
|
||||||
|
-def _repr(text: str, /) -> str:
|
||||||
|
+def _repr(text: str) -> str:
|
||||||
|
"""Return a triple-single-quoted representation of text."""
|
||||||
|
if "'''" not in text:
|
||||||
|
return f"r'''{text}'''"
|
||||||
|
@@ -177,7 +179,7 @@
|
||||||
|
return f"'''{text}'''"
|
||||||
|
|
||||||
|
|
||||||
|
-def setup(app: Sphinx) -> ExtensionMetadata:
|
||||||
|
+def setup(app: Sphinx) -> "ExtensionMetadata":
|
||||||
|
app.add_builder(PydocTopicsBuilder)
|
||||||
|
|
||||||
|
return {
|
@ -1,12 +1,13 @@
---
Doc/tools/extensions/c_annotations.py | 6 +++++-
Doc/tools/extensions/glossary_search.py | 12 ++++++++++--
-Doc/tools/extensions/pyspecific.py | 5 ++++-
-3 files changed, 19 insertions(+), 4 deletions(-)
+2 files changed, 15 insertions(+), 3 deletions(-)

---- a/Doc/tools/extensions/c_annotations.py
-+++ b/Doc/tools/extensions/c_annotations.py
-@@ -118,7 +118,11 @@ def add_annotations(app: Sphinx, doctree
+Index: Python-3.12.10/Doc/tools/extensions/c_annotations.py
+===================================================================
+--- Python-3.12.10.orig/Doc/tools/extensions/c_annotations.py 2025-04-08 13:35:47.000000000 +0200
++++ Python-3.12.10/Doc/tools/extensions/c_annotations.py 2025-04-11 21:16:39.007011463 +0200
+@@ -117,7 +117,11 @@
state = app.env.domaindata["c_annotations"]
refcount_data = state["refcount_data"]
stable_abi_data = state["stable_abi_data"]
@ -19,9 +20,11 @@
par = node.parent
if par["domain"] != "c":
continue
---- a/Doc/tools/extensions/glossary_search.py
-+++ b/Doc/tools/extensions/glossary_search.py
-@@ -30,8 +30,16 @@ def process_glossary_nodes(
+Index: Python-3.12.10/Doc/tools/extensions/glossary_search.py
+===================================================================
+--- Python-3.12.10.orig/Doc/tools/extensions/glossary_search.py 2025-04-08 13:35:47.000000000 +0200
++++ Python-3.12.10/Doc/tools/extensions/glossary_search.py 2025-04-11 21:16:39.007340209 +0200
+@@ -30,8 +30,16 @@
else:
terms = app.env.glossary_terms = {}

@ -40,17 +43,3 @@
term = glossary_item[0].astext()
definition = glossary_item[-1]

---- a/Doc/tools/extensions/pyspecific.py
-+++ b/Doc/tools/extensions/pyspecific.py
-@@ -27,7 +27,10 @@ from sphinx.locale import _ as sphinx_ge
-from sphinx.util import logging
-from sphinx.util.docutils import SphinxDirective
-from sphinx.writers.text import TextWriter, TextTranslator
--from sphinx.util.display import status_iterator
-+try:
-+ from sphinx.util.display import status_iterator
-+except ModuleNotFoundError:
-+ from sphinx.util import status_iterator
-
-ISSUE_URI = 'https://bugs.python.org/issue?@action=redirect&bpo=%s'
@ -1,7 +1,9 @@
-Index: Python-3.12.3/Lib/test/test_compile.py
-===================================================================
---- Python-3.12.3.orig/Lib/test/test_compile.py
-+++ Python-3.12.3/Lib/test/test_compile.py
+---
+Lib/test/test_compile.py | 5 +++++
+1 file changed, 5 insertions(+)
+
+--- a/Lib/test/test_compile.py
++++ b/Lib/test/test_compile.py
@@ -14,6 +14,9 @@ from test.support import (script_helper,
requires_specialization, C_RECURSION_LIMIT)
from test.support.os_helper import FakePath
@ -21,7 +21,7 @@
Create a Python.framework rather than a traditional Unix install. Optional
--- a/Misc/NEWS
+++ b/Misc/NEWS
-@@ -13832,7 +13832,7 @@ C API
+@@ -15146,7 +15146,7 @@ C API
- bpo-40939: Removed documentation for the removed ``PyParser_*`` C API.

- bpo-43795: The list in :ref:`limited-api-list` now shows the public name
@ -1,17 +0,0 @@
---
Lib/test/test_posix.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)

Index: Python-3.12.2/Lib/test/test_posix.py
===================================================================
--- Python-3.12.2.orig/Lib/test/test_posix.py
+++ Python-3.12.2/Lib/test/test_posix.py
@@ -433,7 +433,7 @@ class PosixTester(unittest.TestCase):
def test_posix_fadvise(self):
fd = os.open(os_helper.TESTFN, os.O_RDONLY)
try:
- posix.posix_fadvise(fd, 0, 0, posix.POSIX_FADV_WILLNEED)
+ posix.posix_fadvise(fd, 0, 0, posix.POSIX_FADV_RANDOM)
finally:
os.close(fd)
python312.changes (1037 lines): file diff suppressed because it is too large.
@ -1,7 +1,7 @@
#
# spec file for package python312
#
-# Copyright (c) 2024 SUSE LLC
+# Copyright (c) 2025 SUSE LLC
#
# All modifications and additions to the file contributed by third parties
# remain the property of their copyright owners, unless otherwise agreed
@ -36,12 +36,20 @@
%bcond_without general
%endif

-%if 0%{?do_profiling}
+%if 0%{?do_profiling} && !0%{?want_reproducible_builds}
%bcond_without profileopt
%else
%bcond_with profileopt
%endif

+# Only for Tumbleweed
+# https://en.opensuse.org/openSUSE:Python:Externally_managed
+%if 0%{?suse_version} > 1600
+%bcond_without externally_managed
+%else
+%bcond_with externally_managed
+%endif
+
%define python_pkg_name python312
%if "%{python_pkg_name}" == "%{primary_python}"
%define primary_interpreter 1
@ -110,16 +118,17 @@
# _md5.cpython-38m-x86_64-linux-gnu.so
%define dynlib() %{sitedir}/lib-dynload/%{1}.cpython-%{abi_tag}-%{archname}-%{_os}%{?_gnu}%{?armsuffix}.so
Name: %{python_pkg_name}%{psuffix}
-Version: 3.12.5
+Version: 3.12.11
Release: 0
Summary: Python 3 Interpreter
License: Python-2.0
URL: https://www.python.org/
Source0: https://www.python.org/ftp/python/%{folderversion}/%{tarname}.tar.xz
Source1: https://www.python.org/ftp/python/%{folderversion}/%{tarname}.tar.xz.asc
-Source2: baselibs.conf
-Source3: README.SUSE
-Source4: externally_managed.in
+Source2: https://www.python.org/ftp/python/%{folderversion}/%{tarname}.tar.xz.sigstore
+Source3: baselibs.conf
+Source4: README.SUSE
+Source5: externally_managed.in
Source7: macros.python3
Source8: import_failed.py
Source9: import_failed.map
@ -152,8 +161,6 @@ Patch02: F00251-change-user-install-location.patch
Patch07: python-3.3.0b1-localpath.patch
# replace DATE, TIME and COMPILER by fixed definitions to aid reproducible builds
Patch08: python-3.3.0b1-fix_date_time_compiler.patch
-# POSIX_FADV_WILLNEED throws EINVAL. Use a different constant in test
-Patch09: python-3.3.0b1-test-posix_fadvise.patch
# Raise timeout value for test_subprocess
Patch15: subprocess-raise-timeout.patch
# PATCH-FEATURE-UPSTREAM bpo-31046_ensurepip_honours_prefix.patch bpo#31046 mcepl@suse.com
@ -168,27 +175,15 @@ Patch34: skip-test_pyobject_freed_is_freed.patch
# PATCH-FIX-SLE fix_configure_rst.patch bpo#43774 mcepl@suse.com
# remove duplicate link targets and make documentation with old Sphinx in SLE
Patch35: fix_configure_rst.patch
-# PATCH-FIX-UPSTREAM CVE-2023-27043-email-parsing-errors.patch bsc#1210638 mcepl@suse.com
-# Detect email address parsing errors and return empty tuple to
-# indicate the parsing error (old API)
-Patch36: CVE-2023-27043-email-parsing-errors.patch
-# PATCH-FIX-UPSTREAM CVE-2023-6597-TempDir-cleaning-symlink.patch bsc#1219666 mcepl@suse.com
-# tempfile.TemporaryDirectory: fix symlink bug in cleanup (from gh#python/cpython!99930)
-Patch38: CVE-2023-6597-TempDir-cleaning-symlink.patch
-# PATCH-FIX-OPENSUSE CVE-2023-52425-libexpat-2.6.0-backport-15.6.patch
-# This problem on libexpat is patched on 15.6 without version
-# update, this patch changes the tests to match the libexpat provided
-# by SUSE
-Patch39: CVE-2023-52425-libexpat-2.6.0-backport-15.6.patch
# PATCH-FIX-OPENSUSE fix-test-recursion-limit-15.6.patch gh#python/cpython#115083
# Skip some failing tests in test_compile for i586 arch in 15.6.
Patch40: fix-test-recursion-limit-15.6.patch
# PATCH-FIX-SLE docs-docutils_014-Sphinx_420.patch bsc#[0-9]+ mcepl@suse.com
# related to gh#python/cpython#119317
Patch41: docs-docutils_014-Sphinx_420.patch
-# PATCH-FIX-UPSTREAM CVE-2024-8088-inf-loop-zipfile_Path.patch bsc#1229704 mcepl@suse.com
-# avoid denial of service in zipfile
-Patch42: CVE-2024-8088-inf-loop-zipfile_Path.patch
+# PATCH-FIX-SLE doc-py38-to-py36.patch mcepl@suse.com
+# Make documentation extensions working with Python 3.6
+Patch44: doc-py38-to-py36.patch
BuildRequires: autoconf-archive
BuildRequires: automake
BuildRequires: fdupes
@ -219,6 +214,9 @@ BuildRequires: mpdecimal-devel
BuildRequires: python3-Sphinx >= 4.0.0
%if 0%{?suse_version} >= 1500
BuildRequires: python3-python-docs-theme >= 2022.1
+%if 0%{?suse_version} < 1599
+BuildRequires: python3-dataclasses
+%endif
%endif
%endif
%if %{with general}
@ -442,8 +440,7 @@ This package contains libpython3.2 shared library for embedding in
other applications.

%prep
-%setup -q -n %{tarname}
+%autosetup -p1 -n %{tarname}
-%autopatch -p1

# Fix devhelp doc build gh#python/cpython#120150
echo "master_doc = 'contents'" >> Doc/conf.py
@ -480,7 +477,7 @@ rm Lib/site-packages/README.txt
tar xvf %{SOURCE21}

# Don't fail on warnings when building documentation
-# sed -i -e '/^SPHINXERRORHANDLING/s/-W//' Doc/Makefile
+sed -i -e '/^SPHINXERRORHANDLING/s/-W//' Doc/Makefile

%build
%if %{with doc}
@ -729,7 +726,7 @@ rm %{buildroot}%{_libdir}/libpython3.so
rm %{buildroot}%{_libdir}/pkgconfig/{python3,python3-embed}.pc
%endif

-%if %{suse_version} > 1550
+%if %{with externally_managed}
# PEP-0668 mark this as a distro maintained python
sed -e 's,__PYTHONPREFIX__,%{python_pkg_name},' -e 's,__PYTHON__,python%{python_version},' < %{SOURCE4} > %{buildroot}%{sitedir}/EXTERNALLY-MANAGED
%endif
@ -751,7 +748,7 @@ rm %{buildroot}%{_bindir}/2to3
# documentation
export PDOCS=%{buildroot}%{_docdir}/%{name}
install -d -m 755 $PDOCS
-install -c -m 644 %{SOURCE3} $PDOCS/
+install -c -m 644 %{SOURCE4} $PDOCS/
install -c -m 644 README.rst $PDOCS/

# tools
@ -770,6 +767,9 @@ install -m 755 -D Tools/gdb/libpython.py %{buildroot}%{_datadir}/gdb/auto-load/%
# install devel files to /config
#cp Makefile Makefile.pre.in Makefile.pre $RPM_BUILD_ROOT%{sitedir}/config-%{python_abi}/

+# Remove -IVendor/ from python-config boo#1231795
+sed -i 's/-IVendor\///' %{buildroot}%{_bindir}/python%{python_abi}-config
+
# RPM macros
%if %{primary_interpreter}
mkdir -p %{buildroot}%{_rpmconfigdir}/macros.d/
@ -801,6 +801,11 @@ LD_LIBRARY_PATH=. ./python -O -c "from py_compile import compile; compile('$FAIL
echo %{sitedir}/_import_failed > %{buildroot}/%{sitedir}/site-packages/zzzz-import-failed-hooks.pth
%endif

+# For the purposes of reproducibility, it is necessary to eliminate any *.pyc files inside documentation dirs
+if [ -d %{buildroot}%{_defaultdocdir} ] ; then
+find %{buildroot}%{_defaultdocdir} -type f -name \*.pyc -ls -exec rm -vf '{}' \;
+fi
+
%if %{with general}
%files -n %{python_pkg_name}-tk
%{sitedir}/tkinter
@ -919,7 +924,7 @@ echo %{sitedir}/_import_failed > %{buildroot}/%{sitedir}/site-packages/zzzz-impo
%{_mandir}/man1/python3.1%{?ext_man}
%endif
%{_mandir}/man1/python%{python_version}.1%{?ext_man}
-%if %{suse_version} > 1550
+%if %{with externally_managed}
# PEP-0668
%{sitedir}/EXTERNALLY-MANAGED
%endif
@ -1,18 +1,21 @@
---
-Lib/test/test_subprocess.py | 3 ++-
+Lib/test/test_subprocess.py | 6 +++++-
-1 file changed, 2 insertions(+), 1 deletion(-)
+1 file changed, 5 insertions(+), 1 deletion(-)

-Index: Python-3.12.4/Lib/test/test_subprocess.py
+Index: Python-3.12.10/Lib/test/test_subprocess.py
===================================================================
---- Python-3.12.4.orig/Lib/test/test_subprocess.py
+--- Python-3.12.10.orig/Lib/test/test_subprocess.py 2025-04-11 21:04:45.154639562 +0200
-+++ Python-3.12.4/Lib/test/test_subprocess.py
++++ Python-3.12.10/Lib/test/test_subprocess.py 2025-04-11 21:12:03.374471647 +0200
-@@ -280,7 +280,8 @@ class ProcessTestCase(BaseTestCase):
+@@ -274,7 +274,11 @@
-"time.sleep(3600)"],
+output = subprocess.check_output(
-# Some heavily loaded buildbots (sparc Debian 3.x) require
+[sys.executable, "-c",
-# this much time to start and print.
+"import time; time.sleep(3600)"],
-- timeout=3)
+- timeout=0.1)
++ # Some heavily loaded buildbots (sparc Debian 3.x) require
++ # this much time to start and print.
++ # timeout=0.1)
+ # OBS might require even more
+ timeout=10)
self.fail("Expected TimeoutExpired.")
self.assertEqual(c.exception.output, b'BDFL')

def test_call_kwargs(self):
# call() function with keyword args