forked from pool/python312
Compare commits
39 Commits
SHA256:

b45169abf8
32717178fc
f7e695cbd6
0496c93f4b
30f651fd15
eacdd5e9b5
bae099bfd7
c062335ad2
4fcdd05e86
24c111965b
c4b3c6583b
d058a99b8a
9431cf257f
e85ec7c286
652065b794
a7439aaf5b
694498a6a8
8a08246ce9
b9104c7cad
d5a3615b78
82050fef68
094ec27e0f
f07b688f29
06a5cb31be
803cb95998
cd88adc808
118ac765b0
2f2e126886
8c2f054df4
957ff77855
2aeb619628
38ff7e3150
ec208c83f9
e64f032e0a
a00145be7f
5d2f502703
c75ef22ae5
10154267fc
456c5f3ff6
@@ -1,474 +0,0 @@
From 4a153a1d3b18803a684cd1bcc2cdf3ede3dbae19 Mon Sep 17 00:00:00 2001
From: Victor Stinner <vstinner@python.org>
Date: Fri, 15 Dec 2023 16:10:40 +0100
Subject: [PATCH] [CVE-2023-27043] gh-102988: Reject malformed addresses in
 email.parseaddr() (#111116)

Detect email address parsing errors and return empty tuple to
indicate the parsing error (old API). Add an optional 'strict'
parameter to getaddresses() and parseaddr() functions. Patch by
Thomas Dwyer.

Co-Authored-By: Thomas Dwyer <github@tomd.tel>
---
 Doc/library/email.utils.rst | 19 -
 Lib/email/utils.py | 151 +++++++-
 Lib/test/test_email/test_email.py | 187 +++++++++-
 Misc/NEWS.d/next/Library/2023-10-20-15-28-08.gh-issue-102988.dStNO7.rst | 8
 4 files changed, 344 insertions(+), 21 deletions(-)
 create mode 100644 Misc/NEWS.d/next/Library/2023-10-20-15-28-08.gh-issue-102988.dStNO7.rst

--- a/Doc/library/email.utils.rst
+++ b/Doc/library/email.utils.rst
@@ -58,13 +58,18 @@ of the new API.
    begins with angle brackets, they are stripped off.


-.. function:: parseaddr(address)
+.. function:: parseaddr(address, *, strict=True)

   Parse address -- which should be the value of some address-containing field such
   as :mailheader:`To` or :mailheader:`Cc` -- into its constituent *realname* and
   *email address* parts. Returns a tuple of that information, unless the parse
   fails, in which case a 2-tuple of ``('', '')`` is returned.

+   If *strict* is true, use a strict parser which rejects malformed inputs.
+
+   .. versionchanged:: 3.13
+      Add *strict* optional parameter and reject malformed inputs by default.
+

 .. function:: formataddr(pair, charset='utf-8')

@@ -82,12 +87,15 @@ of the new API.
      Added the *charset* option.


-.. function:: getaddresses(fieldvalues)
+.. function:: getaddresses(fieldvalues, *, strict=True)

   This method returns a list of 2-tuples of the form returned by ``parseaddr()``.
   *fieldvalues* is a sequence of header field values as might be returned by
-   :meth:`Message.get_all <email.message.Message.get_all>`. Here's a simple
-   example that gets all the recipients of a message::
+   :meth:`Message.get_all <email.message.Message.get_all>`.
+
+   If *strict* is true, use a strict parser which rejects malformed inputs.
+
+   Here's a simple example that gets all the recipients of a message::

      from email.utils import getaddresses

@@ -97,6 +105,9 @@ of the new API.
      resent_ccs = msg.get_all('resent-cc', [])
      all_recipients = getaddresses(tos + ccs + resent_tos + resent_ccs)

+   .. versionchanged:: 3.13
+      Add *strict* optional parameter and reject malformed inputs by default.
+

 .. function:: parsedate(date)

--- a/Lib/email/utils.py
+++ b/Lib/email/utils.py
@@ -48,6 +48,7 @@ TICK = "'"
 specialsre = re.compile(r'[][\\()<>@,:;".]')
 escapesre = re.compile(r'[\\"]')

+
 def _has_surrogates(s):
     """Return True if s may contain surrogate-escaped binary data."""
     # This check is based on the fact that unless there are surrogates, utf8
@@ -106,12 +107,127 @@ def formataddr(pair, charset='utf-8'):
     return address


+def _iter_escaped_chars(addr):
+    pos = 0
+    escape = False
+    for pos, ch in enumerate(addr):
+        if escape:
+            yield (pos, '\\' + ch)
+            escape = False
+        elif ch == '\\':
+            escape = True
+        else:
+            yield (pos, ch)
+    if escape:
+        yield (pos, '\\')
+
+
+def _strip_quoted_realnames(addr):
+    """Strip real names between quotes."""
+    if '"' not in addr:
+        # Fast path
+        return addr
+
+    start = 0
+    open_pos = None
+    result = []
+    for pos, ch in _iter_escaped_chars(addr):
+        if ch == '"':
+            if open_pos is None:
+                open_pos = pos
+            else:
+                if start != open_pos:
+                    result.append(addr[start:open_pos])
+                start = pos + 1
+                open_pos = None

-def getaddresses(fieldvalues):
-    """Return a list of (REALNAME, EMAIL) for each fieldvalue."""
-    all = COMMASPACE.join(str(v) for v in fieldvalues)
-    a = _AddressList(all)
-    return a.addresslist
+    if start < len(addr):
+        result.append(addr[start:])
+
+    return ''.join(result)
+
+
+supports_strict_parsing = True
+
+def getaddresses(fieldvalues, *, strict=True):
+    """Return a list of (REALNAME, EMAIL) or ('','') for each fieldvalue.
+
+    When parsing fails for a fieldvalue, a 2-tuple of ('', '') is returned in
+    its place.
+
+    If strict is true, use a strict parser which rejects malformed inputs.
+    """
+
+    # If strict is true, if the resulting list of parsed addresses is greater
+    # than the number of fieldvalues in the input list, a parsing error has
+    # occurred and consequently a list containing a single empty 2-tuple [('',
+    # '')] is returned in its place. This is done to avoid invalid output.
+    #
+    # Malformed input: getaddresses(['alice@example.com <bob@example.com>'])
+    # Invalid output: [('', 'alice@example.com'), ('', 'bob@example.com')]
+    # Safe output: [('', '')]
+
+    if not strict:
+        all = COMMASPACE.join(str(v) for v in fieldvalues)
+        a = _AddressList(all)
+        return a.addresslist
+
+    fieldvalues = [str(v) for v in fieldvalues]
+    fieldvalues = _pre_parse_validation(fieldvalues)
+    addr = COMMASPACE.join(fieldvalues)
+    a = _AddressList(addr)
+    result = _post_parse_validation(a.addresslist)
+
+    # Treat output as invalid if the number of addresses is not equal to the
+    # expected number of addresses.
+    n = 0
+    for v in fieldvalues:
+        # When a comma is used in the Real Name part it is not a deliminator.
+        # So strip those out before counting the commas.
+        v = _strip_quoted_realnames(v)
+        # Expected number of addresses: 1 + number of commas
+        n += 1 + v.count(',')
+    if len(result) != n:
+        return [('', '')]
+
+    return result
+
+
+def _check_parenthesis(addr):
+    # Ignore parenthesis in quoted real names.
+    addr = _strip_quoted_realnames(addr)
+
+    opens = 0
+    for pos, ch in _iter_escaped_chars(addr):
+        if ch == '(':
+            opens += 1
+        elif ch == ')':
+            opens -= 1
+            if opens < 0:
+                return False
+    return (opens == 0)
+
+
+def _pre_parse_validation(email_header_fields):
+    accepted_values = []
+    for v in email_header_fields:
+        if not _check_parenthesis(v):
+            v = "('', '')"
+        accepted_values.append(v)
+
+    return accepted_values
+
+
+def _post_parse_validation(parsed_email_header_tuples):
+    accepted_values = []
+    # The parser would have parsed a correctly formatted domain-literal
+    # The existence of an [ after parsing indicates a parsing failure
+    for v in parsed_email_header_tuples:
+        if '[' in v[1]:
+            v = ('', '')
+        accepted_values.append(v)
+
+    return accepted_values


 def _format_timetuple_and_zone(timetuple, zone):
@@ -205,16 +321,33 @@ def parsedate_to_datetime(data):
             tzinfo=datetime.timezone(datetime.timedelta(seconds=tz)))


-def parseaddr(addr):
+def parseaddr(addr, *, strict=True):
     """
     Parse addr into its constituent realname and email address parts.

     Return a tuple of realname and email address, unless the parse fails, in
     which case return a 2-tuple of ('', '').
+
+    If strict is True, use a strict parser which rejects malformed inputs.
     """
-    addrs = _AddressList(addr).addresslist
-    if not addrs:
-        return '', ''
+    if not strict:
+        addrs = _AddressList(addr).addresslist
+        if not addrs:
+            return ('', '')
+        return addrs[0]
+
+    if isinstance(addr, list):
+        addr = addr[0]
+
+    if not isinstance(addr, str):
+        return ('', '')
+
+    addr = _pre_parse_validation([addr])[0]
+    addrs = _post_parse_validation(_AddressList(addr).addresslist)
+
+    if not addrs or len(addrs) > 1:
+        return ('', '')
+
     return addrs[0]


--- a/Lib/test/test_email/test_email.py
+++ b/Lib/test/test_email/test_email.py
@@ -16,6 +16,7 @@ from unittest.mock import patch

 import email
 import email.policy
+import email.utils

 from email.charset import Charset
 from email.generator import Generator, DecodedGenerator, BytesGenerator
@@ -3352,15 +3353,137 @@ Foo
         ],
     )

+    def test_parsing_errors(self):
+        """Test for parsing errors from CVE-2023-27043 and CVE-2019-16056"""
+        alice = 'alice@example.org'
+        bob = 'bob@example.com'
+        empty = ('', '')
+
+        # Test utils.getaddresses() and utils.parseaddr() on malformed email
+        # addresses: default behavior (strict=True) rejects malformed address,
+        # and strict=False which tolerates malformed address.
+        for invalid_separator, expected_non_strict in (
+            ('(', [(f'<{bob}>', alice)]),
+            (')', [('', alice), empty, ('', bob)]),
+            ('<', [('', alice), empty, ('', bob), empty]),
+            ('>', [('', alice), empty, ('', bob)]),
+            ('[', [('', f'{alice}[<{bob}>]')]),
+            (']', [('', alice), empty, ('', bob)]),
+            ('@', [empty, empty, ('', bob)]),
+            (';', [('', alice), empty, ('', bob)]),
+            (':', [('', alice), ('', bob)]),
+            ('.', [('', alice + '.'), ('', bob)]),
+            ('"', [('', alice), ('', f'<{bob}>')]),
+        ):
+            address = f'{alice}{invalid_separator}<{bob}>'
+            with self.subTest(address=address):
+                self.assertEqual(utils.getaddresses([address]),
+                                 [empty])
+                self.assertEqual(utils.getaddresses([address], strict=False),
+                                 expected_non_strict)
+
+                self.assertEqual(utils.parseaddr([address]),
+                                 empty)
+                self.assertEqual(utils.parseaddr([address], strict=False),
+                                 ('', address))
+
+        # Comma (',') is treated differently depending on strict parameter.
+        # Comma without quotes.
+        address = f'{alice},<{bob}>'
+        self.assertEqual(utils.getaddresses([address]),
+                         [('', alice), ('', bob)])
+        self.assertEqual(utils.getaddresses([address], strict=False),
+                         [('', alice), ('', bob)])
+        self.assertEqual(utils.parseaddr([address]),
+                         empty)
+        self.assertEqual(utils.parseaddr([address], strict=False),
+                         ('', address))
+
+        # Real name between quotes containing comma.
+        address = '"Alice, alice@example.org" <bob@example.com>'
+        expected_strict = ('Alice, alice@example.org', 'bob@example.com')
+        self.assertEqual(utils.getaddresses([address]), [expected_strict])
+        self.assertEqual(utils.getaddresses([address], strict=False), [expected_strict])
+        self.assertEqual(utils.parseaddr([address]), expected_strict)
+        self.assertEqual(utils.parseaddr([address], strict=False),
+                         ('', address))
+
+        # Valid parenthesis in comments.
+        address = 'alice@example.org (Alice)'
+        expected_strict = ('Alice', 'alice@example.org')
+        self.assertEqual(utils.getaddresses([address]), [expected_strict])
+        self.assertEqual(utils.getaddresses([address], strict=False), [expected_strict])
+        self.assertEqual(utils.parseaddr([address]), expected_strict)
+        self.assertEqual(utils.parseaddr([address], strict=False),
+                         ('', address))
+
+        # Invalid parenthesis in comments.
+        address = 'alice@example.org )Alice('
+        self.assertEqual(utils.getaddresses([address]), [empty])
+        self.assertEqual(utils.getaddresses([address], strict=False),
+                         [('', 'alice@example.org'), ('', ''), ('', 'Alice')])
+        self.assertEqual(utils.parseaddr([address]), empty)
+        self.assertEqual(utils.parseaddr([address], strict=False),
+                         ('', address))
+
+        # Two addresses with quotes separated by comma.
+        address = '"Jane Doe" <jane@example.net>, "John Doe" <john@example.net>'
+        self.assertEqual(utils.getaddresses([address]),
+                         [('Jane Doe', 'jane@example.net'),
+                          ('John Doe', 'john@example.net')])
+        self.assertEqual(utils.getaddresses([address], strict=False),
+                         [('Jane Doe', 'jane@example.net'),
+                          ('John Doe', 'john@example.net')])
+        self.assertEqual(utils.parseaddr([address]), empty)
+        self.assertEqual(utils.parseaddr([address], strict=False),
+                         ('', address))
+
+        # Test email.utils.supports_strict_parsing attribute
+        self.assertEqual(email.utils.supports_strict_parsing, True)
+
     def test_getaddresses_nasty(self):
-        eq = self.assertEqual
-        eq(utils.getaddresses(['foo: ;']), [('', '')])
-        eq(utils.getaddresses(
-            ['[]*-- =~$']),
-            [('', ''), ('', ''), ('', '*--')])
-        eq(utils.getaddresses(
-            ['foo: ;', '"Jason R. Mastaler" <jason@dom.ain>']),
-            [('', ''), ('Jason R. Mastaler', 'jason@dom.ain')])
+        for addresses, expected in (
+            (['"Sürname, Firstname" <to@example.com>'],
+             [('Sürname, Firstname', 'to@example.com')]),
+
+            (['foo: ;'],
+             [('', '')]),
+
+            (['foo: ;', '"Jason R. Mastaler" <jason@dom.ain>'],
+             [('', ''), ('Jason R. Mastaler', 'jason@dom.ain')]),
+
+            ([r'Pete(A nice \) chap) <pete(his account)@silly.test(his host)>'],
+             [('Pete (A nice ) chap his account his host)', 'pete@silly.test')]),
+
+            (['(Empty list)(start)Undisclosed recipients :(nobody(I know))'],
+             [('', '')]),
+
+            (['Mary <@machine.tld:mary@example.net>, , jdoe@test . example'],
+             [('Mary', 'mary@example.net'), ('', ''), ('', 'jdoe@test.example')]),
+
+            (['John Doe <jdoe@machine(comment). example>'],
+             [('John Doe (comment)', 'jdoe@machine.example')]),
+
+            (['"Mary Smith: Personal Account" <smith@home.example>'],
+             [('Mary Smith: Personal Account', 'smith@home.example')]),
+
+            (['Undisclosed recipients:;'],
+             [('', '')]),
+
+            ([r'<boss@nil.test>, "Giant; \"Big\" Box" <bob@example.net>'],
+             [('', 'boss@nil.test'), ('Giant; "Big" Box', 'bob@example.net')]),
+        ):
+            with self.subTest(addresses=addresses):
+                self.assertEqual(utils.getaddresses(addresses),
+                                 expected)
+                self.assertEqual(utils.getaddresses(addresses, strict=False),
+                                 expected)
+
+        addresses = ['[]*-- =~$']
+        self.assertEqual(utils.getaddresses(addresses),
+                         [('', '')])
+        self.assertEqual(utils.getaddresses(addresses, strict=False),
+                         [('', ''), ('', ''), ('', '*--')])

     def test_getaddresses_embedded_comment(self):
         """Test proper handling of a nested comment"""
@@ -3551,6 +3674,54 @@ multipart/report
         m = cls(*constructor, policy=email.policy.default)
         self.assertIs(m.policy, email.policy.default)

+    def test_iter_escaped_chars(self):
+        self.assertEqual(list(utils._iter_escaped_chars(r'a\\b\"c\\"d')),
+                         [(0, 'a'),
+                          (2, '\\\\'),
+                          (3, 'b'),
+                          (5, '\\"'),
+                          (6, 'c'),
+                          (8, '\\\\'),
+                          (9, '"'),
+                          (10, 'd')])
+        self.assertEqual(list(utils._iter_escaped_chars('a\\')),
+                         [(0, 'a'), (1, '\\')])
+
+    def test_strip_quoted_realnames(self):
+        def check(addr, expected):
+            self.assertEqual(utils._strip_quoted_realnames(addr), expected)
+
+        check('"Jane Doe" <jane@example.net>, "John Doe" <john@example.net>',
+              ' <jane@example.net>, <john@example.net>')
+        check(r'"Jane \"Doe\"." <jane@example.net>',
+              ' <jane@example.net>')
+
+        # special cases
+        check(r'before"name"after', 'beforeafter')
+        check(r'before"name"', 'before')
+        check(r'b"name"', 'b')  # single char
+        check(r'"name"after', 'after')
+        check(r'"name"a', 'a')  # single char
+        check(r'"name"', '')
+
+        # no change
+        for addr in (
+            'Jane Doe <jane@example.net>, John Doe <john@example.net>',
+            'lone " quote',
+        ):
+            self.assertEqual(utils._strip_quoted_realnames(addr), addr)
+
+
+    def test_check_parenthesis(self):
+        addr = 'alice@example.net'
+        self.assertTrue(utils._check_parenthesis(f'{addr} (Alice)'))
+        self.assertFalse(utils._check_parenthesis(f'{addr} )Alice('))
+        self.assertFalse(utils._check_parenthesis(f'{addr} (Alice))'))
+        self.assertFalse(utils._check_parenthesis(f'{addr} ((Alice)'))
+
+        # Ignore real name between quotes
+        self.assertTrue(utils._check_parenthesis(f'")Alice((" {addr}'))
+

 # Test the iterator/generators
 class TestIterators(TestEmailBase):
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2023-10-20-15-28-08.gh-issue-102988.dStNO7.rst
@@ -0,0 +1,8 @@
+:func:`email.utils.getaddresses` and :func:`email.utils.parseaddr` now
+return ``('', '')`` 2-tuples in more situations where invalid email
+addresses are encountered instead of potentially inaccurate values. Add
+optional *strict* parameter to these two functions: use ``strict=False`` to
+get the old behavior, accept malformed inputs.
+``getattr(email.utils, 'supports_strict_parsing', False)`` can be use to check
+if the *strict* paramater is available. Patch by Thomas Dwyer and Victor
+Stinner to improve the CVE-2023-27043 fix.
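A minimal usage sketch of the behaviour described by the removed patch above (not part of the diff; the inputs and expected values are taken from its comments and test cases, and `supports_strict_parsing` is the probe its NEWS entry recommends):

```python
from email import utils

if getattr(utils, 'supports_strict_parsing', False):
    malformed = 'alice@example.com <bob@example.com>'
    # Strict parsing (the new default) refuses to guess and signals the error.
    print(utils.getaddresses([malformed]))                # [('', '')]
    # strict=False keeps the old, tolerant behaviour.
    print(utils.getaddresses([malformed], strict=False))  # [('', 'alice@example.com'), ('', 'bob@example.com')]
    # Well-formed input parses the same either way.
    print(utils.parseaddr('"Jane Doe" <jane@example.net>'))  # ('Jane Doe', 'jane@example.net')
```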
@@ -1,7 +1,36 @@
-Index: Python-3.12.3/Lib/test/test_xml_etree.py
-===================================================================
---- Python-3.12.3.orig/Lib/test/test_xml_etree.py
-+++ Python-3.12.3/Lib/test/test_xml_etree.py
+---
+ Lib/test/test_pyexpat.py | 4 ++++
+ Lib/test/test_sax.py | 3 +++
+ Lib/test/test_xml_etree.py | 10 ++++++++++
+ 3 files changed, 17 insertions(+)
+
+--- a/Lib/test/test_pyexpat.py
++++ b/Lib/test/test_pyexpat.py
+@@ -794,6 +794,10 @@ class ReparseDeferralTest(unittest.TestC
+         self.assertEqual(started, ['doc'])
+
+     def test_reparse_deferral_disabled(self):
++        if expat.version_info < (2, 6, 0):
++            self.skipTest(f'Expat {expat.version_info} does not '
++                          'support reparse deferral')
++
+         started = []
+
+         def start_element(name, _):
+--- a/Lib/test/test_sax.py
++++ b/Lib/test/test_sax.py
+@@ -1240,6 +1240,9 @@ class ExpatReaderTest(XmlTestBase):
+
+         self.assertEqual(result.getvalue(), start + b"<doc></doc>")
+
++    @unittest.skipIf(pyexpat.version_info < (2, 6, 0),
++                     f'Expat {pyexpat.version_info} does not '
++                     'support reparse deferral')
+     def test_flush_reparse_deferral_disabled(self):
+         result = BytesIO()
+         xmlgen = XMLGenerator(result)
+--- a/Lib/test/test_xml_etree.py
++++ b/Lib/test/test_xml_etree.py
 @@ -121,6 +121,11 @@ ATTLIST_XML = """\
 </foo>
 """
@@ -36,32 +65,3 @@ Index: Python-3.12.3/Lib/test/test_xml_etree.py
     def test_flush_reparse_deferral_disabled(self):
         parser = ET.XMLPullParser(events=('start', 'end'))
 
-Index: Python-3.12.3/Lib/test/test_sax.py
-===================================================================
---- Python-3.12.3.orig/Lib/test/test_sax.py
-+++ Python-3.12.3/Lib/test/test_sax.py
-@@ -1240,6 +1240,9 @@ class ExpatReaderTest(XmlTestBase):
-
-         self.assertEqual(result.getvalue(), start + b"<doc></doc>")
-
-+    @unittest.skipIf(pyexpat.version_info < (2, 6, 0),
-+                     f'Expat {pyexpat.version_info} does not '
-+                     'support reparse deferral')
-     def test_flush_reparse_deferral_disabled(self):
-         result = BytesIO()
-         xmlgen = XMLGenerator(result)
-Index: Python-3.12.3/Lib/test/test_pyexpat.py
-===================================================================
---- Python-3.12.3.orig/Lib/test/test_pyexpat.py
-+++ Python-3.12.3/Lib/test/test_pyexpat.py
-@@ -794,6 +794,10 @@ class ReparseDeferralTest(unittest.TestC
-         self.assertEqual(started, ['doc'])
-
-     def test_reparse_deferral_disabled(self):
-+        if expat.version_info < (2, 6, 0):
-+            self.skipTest(f'Expat {expat.version_info} does not '
-+                          'support reparse deferral')
-+
-         started = []
-
-         def start_element(name, _):
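The refreshed patch above only gates existing tests on the Expat version. As a standalone sketch of that guard pattern (stdlib only; the test body is illustrative):

```python
import unittest
import pyexpat
from xml.parsers import expat

@unittest.skipIf(pyexpat.version_info < (2, 6, 0),
                 f'Expat {pyexpat.version_info} does not support reparse deferral')
class ReparseDeferralTests(unittest.TestCase):
    def test_expat_version_reported_consistently(self):
        # xml.parsers.expat re-exports the same version tuple as pyexpat.
        self.assertEqual(expat.version_info, pyexpat.version_info)

if __name__ == '__main__':
    unittest.main()
```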
@@ -1,171 +0,0 @@
---
 Lib/tempfile.py | 16 +
 Lib/test/test_tempfile.py | 113 ++++++++++
 Misc/NEWS.d/next/Library/2022-12-01-16-57-44.gh-issue-91133.LKMVCV.rst | 2
 3 files changed, 131 insertions(+)

Index: Python-3.12.4/Lib/tempfile.py
===================================================================
--- Python-3.12.4.orig/Lib/tempfile.py
+++ Python-3.12.4/Lib/tempfile.py
@@ -285,6 +285,22 @@ def _resetperms(path):
         _dont_follow_symlinks(chflags, path, 0)
     _dont_follow_symlinks(_os.chmod, path, 0o700)

+def _dont_follow_symlinks(func, path, *args):
+    # Pass follow_symlinks=False, unless not supported on this platform.
+    if func in _os.supports_follow_symlinks:
+        func(path, *args, follow_symlinks=False)
+    elif _os.name == 'nt' or not _os.path.islink(path):
+        func(path, *args)
+
+def _resetperms(path):
+    try:
+        chflags = _os.chflags
+    except AttributeError:
+        pass
+    else:
+        _dont_follow_symlinks(chflags, path, 0)
+    _dont_follow_symlinks(_os.chmod, path, 0o700)
+

 # User visible interfaces.

Index: Python-3.12.4/Lib/test/test_tempfile.py
===================================================================
--- Python-3.12.4.orig/Lib/test/test_tempfile.py
+++ Python-3.12.4/Lib/test/test_tempfile.py
@@ -1803,6 +1803,103 @@ class TestTemporaryDirectory(BaseTestCas
         new_flags = os.stat(dir1).st_flags
         self.assertEqual(new_flags, old_flags)

+    @os_helper.skip_unless_symlink
+    def test_cleanup_with_symlink_modes(self):
+        # cleanup() should not follow symlinks when fixing mode bits (#91133)
+        with self.do_create(recurse=0) as d2:
+            file1 = os.path.join(d2, 'file1')
+            open(file1, 'wb').close()
+            dir1 = os.path.join(d2, 'dir1')
+            os.mkdir(dir1)
+            for mode in range(8):
+                mode <<= 6
+                with self.subTest(mode=format(mode, '03o')):
+                    def test(target, target_is_directory):
+                        d1 = self.do_create(recurse=0)
+                        symlink = os.path.join(d1.name, 'symlink')
+                        os.symlink(target, symlink,
+                                   target_is_directory=target_is_directory)
+                        try:
+                            os.chmod(symlink, mode, follow_symlinks=False)
+                        except NotImplementedError:
+                            pass
+                        try:
+                            os.chmod(symlink, mode)
+                        except FileNotFoundError:
+                            pass
+                        os.chmod(d1.name, mode)
+                        d1.cleanup()
+                        self.assertFalse(os.path.exists(d1.name))
+
+                    with self.subTest('nonexisting file'):
+                        test('nonexisting', target_is_directory=False)
+                    with self.subTest('nonexisting dir'):
+                        test('nonexisting', target_is_directory=True)
+
+                    with self.subTest('existing file'):
+                        os.chmod(file1, mode)
+                        old_mode = os.stat(file1).st_mode
+                        test(file1, target_is_directory=False)
+                        new_mode = os.stat(file1).st_mode
+                        self.assertEqual(new_mode, old_mode,
+                                         '%03o != %03o' % (new_mode, old_mode))
+
+                    with self.subTest('existing dir'):
+                        os.chmod(dir1, mode)
+                        old_mode = os.stat(dir1).st_mode
+                        test(dir1, target_is_directory=True)
+                        new_mode = os.stat(dir1).st_mode
+                        self.assertEqual(new_mode, old_mode,
+                                         '%03o != %03o' % (new_mode, old_mode))
+
+    @unittest.skipUnless(hasattr(os, 'chflags'), 'requires os.chflags')
+    @os_helper.skip_unless_symlink
+    def test_cleanup_with_symlink_flags(self):
+        # cleanup() should not follow symlinks when fixing flags (#91133)
+        flags = stat.UF_IMMUTABLE | stat.UF_NOUNLINK
+        self.check_flags(flags)
+
+        with self.do_create(recurse=0) as d2:
+            file1 = os.path.join(d2, 'file1')
+            open(file1, 'wb').close()
+            dir1 = os.path.join(d2, 'dir1')
+            os.mkdir(dir1)
+            def test(target, target_is_directory):
+                d1 = self.do_create(recurse=0)
+                symlink = os.path.join(d1.name, 'symlink')
+                os.symlink(target, symlink,
+                           target_is_directory=target_is_directory)
+                try:
+                    os.chflags(symlink, flags, follow_symlinks=False)
+                except NotImplementedError:
+                    pass
+                try:
+                    os.chflags(symlink, flags)
+                except FileNotFoundError:
+                    pass
+                os.chflags(d1.name, flags)
+                d1.cleanup()
+                self.assertFalse(os.path.exists(d1.name))
+
+            with self.subTest('nonexisting file'):
+                test('nonexisting', target_is_directory=False)
+            with self.subTest('nonexisting dir'):
+                test('nonexisting', target_is_directory=True)
+
+            with self.subTest('existing file'):
+                os.chflags(file1, flags)
+                old_flags = os.stat(file1).st_flags
+                test(file1, target_is_directory=False)
+                new_flags = os.stat(file1).st_flags
+                self.assertEqual(new_flags, old_flags)
+
+            with self.subTest('existing dir'):
+                os.chflags(dir1, flags)
+                old_flags = os.stat(dir1).st_flags
+                test(dir1, target_is_directory=True)
+                new_flags = os.stat(dir1).st_flags
+                self.assertEqual(new_flags, old_flags)
+
     @support.cpython_only
     def test_del_on_collection(self):
         # A TemporaryDirectory is deleted when garbage collected
@@ -1977,6 +2074,22 @@ class TestTemporaryDirectory(BaseTestCas

     def check_flags(self, flags):
         # skip the test if these flags are not supported (ex: FreeBSD 13)
+        filename = os_helper.TESTFN
+        try:
+            open(filename, "w").close()
+            try:
+                os.chflags(filename, flags)
+            except OSError as exc:
+                # "OSError: [Errno 45] Operation not supported"
+                self.skipTest(f"chflags() doesn't support flags "
+                              f"{flags:#b}: {exc}")
+            else:
+                os.chflags(filename, 0)
+        finally:
+            os_helper.unlink(filename)
+
+    def check_flags(self, flags):
+        # skip the test if these flags are not supported (ex: FreeBSD 13)
         filename = os_helper.TESTFN
         try:
             open(filename, "w").close()
Index: Python-3.12.4/Misc/NEWS.d/next/Library/2022-12-01-16-57-44.gh-issue-91133.LKMVCV.rst
===================================================================
--- /dev/null
+++ Python-3.12.4/Misc/NEWS.d/next/Library/2022-12-01-16-57-44.gh-issue-91133.LKMVCV.rst
@@ -0,0 +1,2 @@
+Fix a bug in :class:`tempfile.TemporaryDirectory` cleanup, which now no longer
+dereferences symlinks when working around file system permission errors.
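A rough illustration of the behaviour the removed tempfile patch backports (illustrative only; assumes a POSIX system where unprivileged symlink creation works):

```python
import os
import stat
import tempfile

target = tempfile.mkdtemp()          # stands in for a directory cleanup must not touch
os.chmod(target, 0o755)
before = stat.S_IMODE(os.stat(target).st_mode)

victim = tempfile.TemporaryDirectory()
os.symlink(target, os.path.join(victim.name, 'link'))
os.chmod(victim.name, 0o000)         # force cleanup() down its permission-fixing path
victim.cleanup()                     # succeeds, and does not chmod through 'link'

after = stat.S_IMODE(os.stat(target).st_mode)
assert before == after               # the symlink target keeps its permissions
os.rmdir(target)
```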
@@ -1,148 +0,0 @@
---
 Lib/test/test_zipfile/_path/test_path.py | 78 ++++++++++
 Lib/zipfile/_path/__init__.py | 18 ++
 Misc/NEWS.d/next/Library/2024-08-11-14-08-04.gh-issue-122905.7tDsxA.rst | 1
 Misc/NEWS.d/next/Library/2024-08-26-13-45-20.gh-issue-123270.gXHvNJ.rst | 3
 4 files changed, 98 insertions(+), 2 deletions(-)

--- a/Lib/test/test_zipfile/_path/test_path.py
+++ b/Lib/test/test_zipfile/_path/test_path.py
@@ -4,6 +4,7 @@ import contextlib
 import pathlib
 import pickle
 import sys
+import time
 import unittest
 import zipfile

@@ -577,3 +578,80 @@ class TestPath(unittest.TestCase):
             zipfile.Path(alpharep)
         with self.assertRaises(KeyError):
             alpharep.getinfo('does-not-exist')
+
+    def test_malformed_paths(self):
+        """
+        Path should handle malformed paths gracefully.
+
+        Paths with leading slashes are not visible.
+
+        Paths with dots are treated like regular files.
+        """
+        data = io.BytesIO()
+        zf = zipfile.ZipFile(data, "w")
+        zf.writestr("/one-slash.txt", b"content")
+        zf.writestr("//two-slash.txt", b"content")
+        zf.writestr("../parent.txt", b"content")
+        zf.filename = ''
+        root = zipfile.Path(zf)
+        assert list(map(str, root.iterdir())) == ['../']
+        assert root.joinpath('..').joinpath('parent.txt').read_bytes() == b'content'
+
+    def test_unsupported_names(self):
+        """
+        Path segments with special characters are readable.
+
+        On some platforms or file systems, characters like
+        ``:`` and ``?`` are not allowed, but they are valid
+        in the zip file.
+        """
+        data = io.BytesIO()
+        zf = zipfile.ZipFile(data, "w")
+        zf.writestr("path?", b"content")
+        zf.writestr("V: NMS.flac", b"fLaC...")
+        zf.filename = ''
+        root = zipfile.Path(zf)
+        contents = root.iterdir()
+        assert next(contents).name == 'path?'
+        assert next(contents).name == 'V: NMS.flac'
+        assert root.joinpath('V: NMS.flac').read_bytes() == b"fLaC..."
+
+    def test_backslash_not_separator(self):
+        """
+        In a zip file, backslashes are not separators.
+        """
+        data = io.BytesIO()
+        zf = zipfile.ZipFile(data, "w")
+        zf.writestr(DirtyZipInfo.for_name("foo\\bar", zf), b"content")
+        zf.filename = ''
+        root = zipfile.Path(zf)
+        (first,) = root.iterdir()
+        assert not first.is_dir()
+        assert first.name == 'foo\\bar'
+
+
+class DirtyZipInfo(zipfile.ZipInfo):
+    """
+    Bypass name sanitization.
+    """
+
+    def __init__(self, filename, *args, **kwargs):
+        super().__init__(filename, *args, **kwargs)
+        self.filename = filename
+
+    @classmethod
+    def for_name(cls, name, archive):
+        """
+        Construct the same way that ZipFile.writestr does.
+
+        TODO: extract this functionality and re-use
+        """
+        self = cls(filename=name, date_time=time.localtime(time.time())[:6])
+        self.compress_type = archive.compression
+        self.compress_level = archive.compresslevel
+        if self.filename.endswith('/'):  # pragma: no cover
+            self.external_attr = 0o40775 << 16  # drwxrwxr-x
+            self.external_attr |= 0x10  # MS-DOS directory flag
+        else:
+            self.external_attr = 0o600 << 16  # ?rw-------
+        return self
--- a/Lib/zipfile/_path/__init__.py
+++ b/Lib/zipfile/_path/__init__.py
@@ -1,3 +1,12 @@
+"""
+A Path-like interface for zipfiles.
+
+This codebase is shared between zipfile.Path in the stdlib
+and zipp in PyPI. See
+https://github.com/python/importlib_metadata/wiki/Development-Methodology
+for more detail.
+"""
+
 import io
 import posixpath
 import zipfile
@@ -34,7 +43,7 @@ def _parents(path):
 def _ancestry(path):
     """
     Given a path with elements separated by
-    posixpath.sep, generate all elements of that path
+    posixpath.sep, generate all elements of that path.

     >>> list(_ancestry('b/d'))
     ['b/d', 'b']
@@ -46,9 +55,14 @@ def _ancestry(path):
     ['b']
     >>> list(_ancestry(''))
     []
+
+    Multiple separators are treated like a single.
+
+    >>> list(_ancestry('//b//d///f//'))
+    ['//b//d///f', '//b//d', '//b']
     """
     path = path.rstrip(posixpath.sep)
-    while path and path != posixpath.sep:
+    while path.rstrip(posixpath.sep):
         yield path
         path, tail = posixpath.split(path)

--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2024-08-11-14-08-04.gh-issue-122905.7tDsxA.rst
@@ -0,0 +1 @@
+:class:`zipfile.Path` objects now sanitize names from the zipfile.
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2024-08-26-13-45-20.gh-issue-123270.gXHvNJ.rst
@@ -0,0 +1,3 @@
+Applied a more surgical fix for malformed payloads in :class:`zipfile.Path`
+causing infinite loops (gh-122905) without breaking contents using
+legitimate characters.
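A short usage sketch mirroring `test_malformed_paths` from the removed zipfile patch above (illustrative only; variable names are just for the example):

```python
import io
import zipfile

data = io.BytesIO()
zf = zipfile.ZipFile(data, "w")
zf.writestr("/one-slash.txt", b"content")   # leading-slash member name
zf.writestr("../parent.txt", b"content")    # dotted member name
zf.filename = ''
root = zipfile.Path(zf)

# Leading-slash names are not visible; '..' is treated like a regular entry.
print(list(map(str, root.iterdir())))                           # ['../']
print(root.joinpath('..').joinpath('parent.txt').read_bytes())  # b'content'
```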
@@ -24,15 +24,12 @@ Co-authored-by: Miro Hrončok <miro@hroncok.cz>
 Co-authored-by: Michal Cyprian <m.cyprian@gmail.com>
 Co-authored-by: Lumír Balhar <frenzy.madness@gmail.com>
 ---
- Lib/site.py | 9 ++++++-
- Lib/sysconfig.py | 49 +++++++++++++++++++++++++++++++-
- Lib/test/test_sysconfig.py | 17 +++++++++++--
- 3 files changed, 71 insertions(+), 4 deletions(-)
-
-Index: Python-3.12.4/Lib/sysconfig.py
-===================================================================
---- Python-3.12.4.orig/Lib/sysconfig.py
-+++ Python-3.12.4/Lib/sysconfig.py
+ Lib/sysconfig.py | 51 ++++++++++++++++++++++++++++++++++++++++++++-
+ Lib/test/test_sysconfig.py | 17 +++++++++++++--
+ 2 files changed, 65 insertions(+), 3 deletions(-)
+
+--- a/Lib/sysconfig.py
++++ b/Lib/sysconfig.py
 @@ -104,6 +104,11 @@ if os.name == 'nt':
      else:
          _INSTALL_SCHEMES['venv'] = _INSTALL_SCHEMES['posix_venv']
@@ -45,7 +42,7 @@ Index: Python-3.12.4/Lib/sysconfig.py
 
  # NOTE: site.py has copy of this function.
  # Sync it when modify this function.
-@@ -163,6 +168,19 @@ if _HAS_USER_BASE:
+@@ -163,13 +168,28 @@ if _HAS_USER_BASE:
      },
  }
 
@@ -65,7 +62,16 @@ Index: Python-3.12.4/Lib/sysconfig.py
  _SCHEME_KEYS = ('stdlib', 'platstdlib', 'purelib', 'platlib', 'include',
                  'scripts', 'data')
 
-@@ -263,11 +281,40 @@ def _extend_dict(target_dict, other_dict
+ _PY_VERSION = sys.version.split()[0]
+ _PY_VERSION_SHORT = f'{sys.version_info[0]}.{sys.version_info[1]}'
+ _PY_VERSION_SHORT_NO_DOT = f'{sys.version_info[0]}{sys.version_info[1]}'
++_PREFIX = os.path.normpath(sys.prefix)
+ _BASE_PREFIX = os.path.normpath(sys.base_prefix)
++_EXEC_PREFIX = os.path.normpath(sys.exec_prefix)
+ _BASE_EXEC_PREFIX = os.path.normpath(sys.base_exec_prefix)
+ # Mutex guarding initialization of _CONFIG_VARS.
+ _CONFIG_VARS_LOCK = threading.RLock()
+@@ -261,11 +281,40 @@ def _extend_dict(target_dict, other_dict
      target_dict[key] = value
 
 
@@ -107,11 +113,9 @@ Index: Python-3.12.4/Lib/sysconfig.py
      if os.name == 'nt':
          # On Windows we want to substitute 'lib' for schemes rather
          # than the native value (without modifying vars, in case it
-Index: Python-3.12.4/Lib/test/test_sysconfig.py
-===================================================================
---- Python-3.12.4.orig/Lib/test/test_sysconfig.py
-+++ Python-3.12.4/Lib/test/test_sysconfig.py
-@@ -110,8 +110,19 @@ class TestSysConfig(unittest.TestCase):
+--- a/Lib/test/test_sysconfig.py
++++ b/Lib/test/test_sysconfig.py
+@@ -119,8 +119,19 @@ class TestSysConfig(unittest.TestCase):
      for scheme in _INSTALL_SCHEMES:
          for name in _INSTALL_SCHEMES[scheme]:
              expected = _INSTALL_SCHEMES[scheme][name].format(**config_vars)
@@ -132,7 +136,7 @@ Index: Python-3.12.4/Lib/test/test_sysconfig.py
              os.path.normpath(expected),
          )
 
-@@ -344,7 +355,7 @@ class TestSysConfig(unittest.TestCase):
+@@ -353,7 +364,7 @@ class TestSysConfig(unittest.TestCase):
          self.assertTrue(os.path.isfile(config_h), config_h)
 
      def test_get_scheme_names(self):
@@ -141,7 +145,7 @@ Index: Python-3.12.4/Lib/test/test_sysconfig.py
      if HAS_USER_BASE:
          wanted.extend(['nt_user', 'osx_framework_user', 'posix_user'])
      self.assertEqual(get_scheme_names(), tuple(sorted(wanted)))
-@@ -356,6 +367,8 @@ class TestSysConfig(unittest.TestCase):
+@@ -365,6 +376,8 @@ class TestSysConfig(unittest.TestCase):
      cmd = "-c", "import sysconfig; print(sysconfig.get_platform())"
      self.assertEqual(py.call_real(*cmd), py.call_link(*cmd))
 
@@ -1,3 +0,0 @@
version https://git-lfs.github.com/spec/v1
oid sha256:fa8a2e12c5e620b09f53e65bcd87550d2e5a1e2e04bf8ba991dcc55113876397
size 20422396

@@ -1,18 +0,0 @@
-----BEGIN PGP SIGNATURE-----

iQKTBAABCgB9FiEEcWlgX2LHUTVtBUomqCHmgOX6YwUFAmayiFtfFIAAAAAALgAo
aXNzdWVyLWZwckBub3RhdGlvbnMub3BlbnBncC5maWZ0aGhvcnNlbWFuLm5ldDcx
Njk2MDVGNjJDNzUxMzU2RDA1NEEyNkE4MjFFNjgwRTVGQTYzMDUACgkQqCHmgOX6
YwUr4g//VyVs9tvbtiSp8pGe8f1gYErEw54r124sL/CBuNii8Irts1j5ymGxcm+l
hshPK5UlqRnhd5dCJWFTvLTXa5Ko2R1L3JyyxfGd1hmDuMhrWsDHijI0R7L/mGM5
6X2LTaadBVNvk8HaNKvR8SEWvo68rdnOuYElFA9ir7uqwjO26ZWz9FfH80YDGwo8
Blef2NYw8rNhiaZMFV0HYV7D+YyUAZnFNfW8M7Fd4oskUyj1tD9J89T9FFLYN09d
BcCIf+EdiEfqRpKxH89bW2g52kDrm4jYGONtpyF8eruyS3YwYSbvbuWioBYKmlxC
s51mieXz6G325GTZnmPxLek3ywPv6Gil9y0wH3fIr2BsWsmXust4LBpjDGt56Fy6
seokGBg8xzsBSk3iEqNoFmNsy/QOiuCcDejX4XqBDNodOlETQPJb07TkTI2iOmg9
NG4Atiz1HvGVxK68UuK9IIcNHyaWUmH8h4VQFGvc6KV6feP5Nm21Y12PZ5XIqJBO
Y8M/VJIJ5koaNPQfnBbbI5YBkUr4BVpIXIpY5LM/L5sUo2C3R7hMi0VGK88HGfSQ
KV4JmZgf6RMBNmrWY12sryS1QQ6q3P110GTUGQWB3sxxNbhmfcrK+4viqHc83yDz
ifmk33HuqaQGU7OzUMHeNcoCJIPo3H1FpoHOn9wLLCtA1pT+as4=
=t0Rk
-----END PGP SIGNATURE-----
3  Python-3.12.9.tar.xz  Normal file
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:7220835d9f90b37c006e9842a8dff4580aaca4318674f947302b8d28f3f81112
size 20502440

18  Python-3.12.9.tar.xz.asc  Normal file
@@ -0,0 +1,18 @@
-----BEGIN PGP SIGNATURE-----

iQKTBAABCgB9FiEEcWlgX2LHUTVtBUomqCHmgOX6YwUFAmeiX7JfFIAAAAAALgAo
aXNzdWVyLWZwckBub3RhdGlvbnMub3BlbnBncC5maWZ0aGhvcnNlbWFuLm5ldDcx
Njk2MDVGNjJDNzUxMzU2RDA1NEEyNkE4MjFFNjgwRTVGQTYzMDUACgkQqCHmgOX6
YwXTqw//VlGJA5CRDfljMwN9BmG2hdXB1B7Lj0PssuAo4A/lH99gb4DRVDS9LNjr
99WdH/fQQovx6rTbtyJnN8Vh7SSduBi/vOc5n5VOXZB0buqR0l+0wu4m43Slu6xP
fXO349Hr6585lemU8x54TrP756rSVUhy3T+krUuNDL9W1Wrp2yDCpt4tUoEhNXGw
DoYS8MrK/ygLNV/7p2DeMWOHNdbjKNH6rfzl60IAwAp7oANcyoj6Pho960bbeUDo
tb47Pw0WWZv3EuITP6bPa8+Z6dj096cFL3AQJ3ap16OduwiaOsGhqTfe4+kbp6ut
Gp/1HeIHzPbEV0E5K78RWHuzBYgU1oPGiMjlp7WkA7bP2OSTF7nM4EBkiiihk2qx
3d5VF9wpVRJ4AuR/aWcWcMnvD2ziSWfzZM3Z3VLnTaWYpuRkQp8TTiFr1vHqxMYm
p/8AozzBJMfOS6u/Q0WNAdk6x3VB0DXnTAETXQVIrex4DXqX/3WSMWK5/x/OyCh9
ytdreIQYbv1KvlNQJkgpPb7jlUSXp8t9fHCXt4hszhJgtjwIj/+CuSeAgX0bhopV
XsqOBseDNhATg38mhwBVaeFKGRpxsKdpxcdqSEGKuhXtEI/hJmkpZGw49gy3xWxB
KlgRgKjCPw+BGAIVV9qvdtJzam8a09SKVcslqgF619q0byQoBmo=
=1TbP
-----END PGP SIGNATURE-----

1  Python-3.12.9.tar.xz.sigstore  Normal file
File diff suppressed because one or more lines are too long
592
doc-py38-to-py36.patch
Normal file
592
doc-py38-to-py36.patch
Normal file
@ -0,0 +1,592 @@
|
|||||||
|
---
|
||||||
|
Doc/Makefile | 8 ++--
|
||||||
|
Doc/conf.py | 16 ++++++++-
|
||||||
|
Doc/tools/check-warnings.py | 5 +-
|
||||||
|
Doc/tools/extensions/audit_events.py | 54 ++++++++++++++++----------------
|
||||||
|
Doc/tools/extensions/availability.py | 15 ++++----
|
||||||
|
Doc/tools/extensions/c_annotations.py | 45 ++++++++++++++++----------
|
||||||
|
Doc/tools/extensions/changes.py | 8 +---
|
||||||
|
Doc/tools/extensions/glossary_search.py | 10 +----
|
||||||
|
Doc/tools/extensions/misc_news.py | 14 +++-----
|
||||||
|
Doc/tools/extensions/patchlevel.py | 9 ++---
|
||||||
|
10 files changed, 100 insertions(+), 84 deletions(-)
|
||||||
|
|
||||||
|
--- a/Doc/Makefile
|
||||||
|
+++ b/Doc/Makefile
|
||||||
|
@@ -14,15 +14,15 @@ PAPER =
|
||||||
|
SOURCES =
|
||||||
|
DISTVERSION = $(shell $(PYTHON) tools/extensions/patchlevel.py)
|
||||||
|
REQUIREMENTS = requirements.txt
|
||||||
|
-SPHINXERRORHANDLING = --fail-on-warning
|
||||||
|
+SPHINXERRORHANDLING = -W
|
||||||
|
|
||||||
|
# Internal variables.
|
||||||
|
PAPEROPT_a4 = --define latex_elements.papersize=a4paper
|
||||||
|
PAPEROPT_letter = --define latex_elements.papersize=letterpaper
|
||||||
|
|
||||||
|
-ALLSPHINXOPTS = --builder $(BUILDER) \
|
||||||
|
- --doctree-dir build/doctrees \
|
||||||
|
- --jobs $(JOBS) \
|
||||||
|
+ALLSPHINXOPTS = -b $(BUILDER) \
|
||||||
|
+ -d build/doctrees \
|
||||||
|
+ -j $(JOBS) \
|
||||||
|
$(PAPEROPT_$(PAPER)) \
|
||||||
|
$(SPHINXOPTS) $(SPHINXERRORHANDLING) \
|
||||||
|
. build/$(BUILDER) $(SOURCES)
|
||||||
|
--- a/Doc/conf.py
|
||||||
|
+++ b/Doc/conf.py
|
||||||
|
@@ -10,6 +10,8 @@ import importlib
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
|
||||||
|
+from sphinx import version_info
|
||||||
|
+
|
||||||
|
# Make our custom extensions available to Sphinx
|
||||||
|
sys.path.append(os.path.abspath('tools/extensions'))
|
||||||
|
sys.path.append(os.path.abspath('includes'))
|
||||||
|
@@ -82,7 +84,7 @@ highlight_language = 'python3'
|
||||||
|
|
||||||
|
# Minimum version of sphinx required
|
||||||
|
# Keep this version in sync with ``Doc/requirements.txt``.
|
||||||
|
-needs_sphinx = '8.1.3'
|
||||||
|
+needs_sphinx = '4.2.0'
|
||||||
|
|
||||||
|
# Create table of contents entries for domain objects (e.g. functions, classes,
|
||||||
|
# attributes, etc.). Default is True.
|
||||||
|
@@ -337,7 +339,7 @@ html_short_title = f'{release} Documenta
|
||||||
|
# (See .readthedocs.yml and https://docs.readthedocs.io/en/stable/reference/environment-variables.html)
|
||||||
|
is_deployment_preview = os.getenv("READTHEDOCS_VERSION_TYPE") == "external"
|
||||||
|
repository_url = os.getenv("READTHEDOCS_GIT_CLONE_URL", "")
|
||||||
|
-repository_url = repository_url.removesuffix(".git")
|
||||||
|
+repository_url = repository_url[:-len(".git")]
|
||||||
|
html_context = {
|
||||||
|
"is_deployment_preview": is_deployment_preview,
|
||||||
|
"repository_url": repository_url or None,
|
||||||
|
@@ -583,6 +585,16 @@ extlinks = {
|
||||||
|
}
|
||||||
|
extlinks_detect_hardcoded_links = True
|
||||||
|
|
||||||
|
+if version_info[:2] < (8, 1):
|
||||||
|
+ # Sphinx 8.1 has in-built CVE and CWE roles.
|
||||||
|
+ extlinks.update({
|
||||||
|
+ "cve": (
|
||||||
|
+ "https://www.cve.org/CVERecord?id=CVE-%s",
|
||||||
|
+ "CVE-%s",
|
||||||
|
+ ),
|
||||||
|
+ "cwe": ("https://cwe.mitre.org/data/definitions/%s.html", "CWE-%s"),
|
||||||
|
+ })
|
||||||
|
+
|
||||||
|
# Options for c_annotations extension
|
||||||
|
# -----------------------------------
|
||||||
|
|
||||||
|
--- a/Doc/tools/check-warnings.py
|
||||||
|
+++ b/Doc/tools/check-warnings.py
|
||||||
|
@@ -228,7 +228,8 @@ def fail_if_regression(
|
||||||
|
print(filename)
|
||||||
|
for warning in warnings:
|
||||||
|
if filename in warning:
|
||||||
|
- if match := WARNING_PATTERN.fullmatch(warning):
|
||||||
|
+ match = WARNING_PATTERN.fullmatch(warning)
|
||||||
|
+ if match:
|
||||||
|
print(" {line}: {msg}".format_map(match))
|
||||||
|
return -1
|
||||||
|
return 0
|
||||||
|
@@ -316,7 +317,7 @@ def main(argv: list[str] | None = None)
|
||||||
|
|
||||||
|
cwd = str(Path.cwd()) + os.path.sep
|
||||||
|
files_with_nits = {
|
||||||
|
- warning.removeprefix(cwd).split(":")[0]
|
||||||
|
+ (warning[len(cwd):].split(":")[0] if warning.startswith(cwd) else warning.split(":")[0])
|
||||||
|
for warning in warnings
|
||||||
|
if "Doc/" in warning
|
||||||
|
}
|
||||||
|
--- a/Doc/tools/extensions/audit_events.py
|
||||||
|
+++ b/Doc/tools/extensions/audit_events.py
|
||||||
|
@@ -1,9 +1,6 @@
|
||||||
|
"""Support for documenting audit events."""
|
||||||
|
|
||||||
|
-from __future__ import annotations
|
||||||
|
-
|
||||||
|
import re
|
||||||
|
-from typing import TYPE_CHECKING
|
||||||
|
|
||||||
|
from docutils import nodes
|
||||||
|
from sphinx.errors import NoUri
|
||||||
|
@@ -12,12 +9,11 @@ from sphinx.transforms.post_transforms i
|
||||||
|
from sphinx.util import logging
|
||||||
|
from sphinx.util.docutils import SphinxDirective
|
||||||
|
|
||||||
|
-if TYPE_CHECKING:
|
||||||
|
- from collections.abc import Iterator
|
||||||
|
+from typing import Any, List, Tuple
|
||||||
|
|
||||||
|
- from sphinx.application import Sphinx
|
||||||
|
- from sphinx.builders import Builder
|
||||||
|
- from sphinx.environment import BuildEnvironment
|
||||||
|
+from sphinx.application import Sphinx
|
||||||
|
+from sphinx.builders import Builder
|
||||||
|
+from sphinx.environment import BuildEnvironment
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
@@ -32,16 +28,16 @@ _SYNONYMS = [
|
||||||
|
|
||||||
|
class AuditEvents:
|
||||||
|
def __init__(self) -> None:
|
||||||
|
- self.events: dict[str, list[str]] = {}
|
||||||
|
- self.sources: dict[str, list[tuple[str, str]]] = {}
|
||||||
|
+ self.events: dict[str, List[str]] = {}
|
||||||
|
+ self.sources: dict[str, List[Tuple[str, str]]] = {}
|
||||||
|
|
||||||
|
- def __iter__(self) -> Iterator[tuple[str, list[str], tuple[str, str]]]:
|
||||||
|
+ def __iter__(self) -> Any:
|
||||||
|
for name, args in self.events.items():
|
||||||
|
for source in self.sources[name]:
|
||||||
|
yield name, args, source
|
||||||
|
|
||||||
|
def add_event(
|
||||||
|
- self, name, args: list[str], source: tuple[str, str]
|
||||||
|
+ self, name, args: List[str], source: Tuple[str, str]
|
||||||
|
) -> None:
|
||||||
|
if name in self.events:
|
||||||
|
self._check_args_match(name, args)
|
||||||
|
@@ -49,7 +45,7 @@ class AuditEvents:
|
||||||
|
self.events[name] = args
|
||||||
|
self.sources.setdefault(name, []).append(source)
|
||||||
|
|
||||||
|
- def _check_args_match(self, name: str, args: list[str]) -> None:
|
||||||
|
+ def _check_args_match(self, name: str, args: List[str]) -> None:
|
||||||
|
current_args = self.events[name]
|
||||||
|
msg = (
|
||||||
|
f"Mismatched arguments for audit-event {name}: "
|
||||||
|
@@ -60,7 +56,7 @@ class AuditEvents:
|
||||||
|
if len(current_args) != len(args):
|
||||||
|
logger.warning(msg)
|
||||||
|
return
|
||||||
|
- for a1, a2 in zip(current_args, args, strict=False):
|
||||||
|
+ for a1, a2 in zip(current_args, args):
|
||||||
|
if a1 == a2:
|
||||||
|
continue
|
||||||
|
if any(a1 in s and a2 in s for s in _SYNONYMS):
|
||||||
|
@@ -73,7 +69,7 @@ class AuditEvents:
|
||||||
|
name_clean = re.sub(r"\W", "_", name)
|
||||||
|
return f"audit_event_{name_clean}_{source_count}"
|
||||||
|
|
||||||
|
- def rows(self) -> Iterator[tuple[str, list[str], list[tuple[str, str]]]]:
|
||||||
|
+ def rows(self) -> Any:
|
||||||
|
for name in sorted(self.events.keys()):
|
||||||
|
yield name, self.events[name], self.sources[name]
|
||||||
|
|
||||||
|
@@ -97,7 +93,7 @@ def audit_events_purge(
|
||||||
|
def audit_events_merge(
|
||||||
|
app: Sphinx,
|
||||||
|
env: BuildEnvironment,
|
||||||
|
- docnames: list[str],
|
||||||
|
+ docnames: List[str],
|
||||||
|
other: BuildEnvironment,
|
||||||
|
) -> None:
|
||||||
|
"""In Sphinx parallel builds, this merges audit_events from subprocesses."""
|
||||||
|
@@ -126,14 +122,16 @@ class AuditEvent(SphinxDirective):
|
||||||
|
),
|
||||||
|
]
|
||||||
|
|
||||||
|
- def run(self) -> list[nodes.paragraph]:
|
||||||
|
+ def run(self) -> List[nodes.paragraph]:
|
||||||
|
+ def _no_walrus_op(args):
|
||||||
|
+ for arg in args.strip("'\"").split(","):
|
||||||
|
+ aarg = arg.strip()
|
||||||
|
+ if aarg:
|
||||||
|
+ yield aarg
|
||||||
|
+
|
||||||
|
name = self.arguments[0]
|
||||||
|
if len(self.arguments) >= 2 and self.arguments[1]:
|
||||||
|
- args = [
|
||||||
|
- arg
|
||||||
|
- for argument in self.arguments[1].strip("'\"").split(",")
|
||||||
|
- if (arg := argument.strip())
|
||||||
|
- ]
|
||||||
|
+ args = list(_no_walrus_op(self.arguments[1]))
|
||||||
|
else:
|
||||||
|
args = []
|
||||||
|
ids = []
|
||||||
|
@@ -169,7 +167,7 @@ class audit_event_list(nodes.General, no
|
||||||
|
|
||||||
|
|
||||||
|
class AuditEventListDirective(SphinxDirective):
|
||||||
|
- def run(self) -> list[audit_event_list]:
|
||||||
|
+ def run(self) -> List[audit_event_list]:
|
||||||
|
return [audit_event_list()]
|
||||||
|
|
||||||
|
|
||||||
|
@@ -181,7 +179,11 @@ class AuditEventListTransform(SphinxPost
|
||||||
|
return
|
||||||
|
|
||||||
|
table = self._make_table(self.app.builder, self.env.docname)
|
||||||
|
- for node in self.document.findall(audit_event_list):
|
||||||
|
+ try:
|
||||||
|
+ findall = self.document.findall
|
||||||
|
+ except AttributeError:
|
||||||
|
+ findall = self.document.traverse
|
||||||
|
+ for node in findall(audit_event_list):
|
||||||
|
node.replace_self(table)
|
||||||
|
|
||||||
|
def _make_table(self, builder: Builder, docname: str) -> nodes.table:
|
||||||
|
@@ -217,8 +219,8 @@ class AuditEventListTransform(SphinxPost
|
||||||
|
builder: Builder,
|
||||||
|
docname: str,
|
||||||
|
name: str,
|
||||||
|
- args: list[str],
|
||||||
|
- sources: list[tuple[str, str]],
|
||||||
|
+ args: List[str],
|
||||||
|
+ sources: List[Tuple[str, str]],
|
||||||
|
) -> nodes.row:
|
||||||
|
row = nodes.row()
|
||||||
|
name_node = nodes.paragraph("", nodes.Text(name))
|
||||||
|
--- a/Doc/tools/extensions/availability.py
+++ b/Doc/tools/extensions/availability.py
@@ -1,8 +1,6 @@
 """Support for documenting platform availability"""

-from __future__ import annotations
-
-from typing import TYPE_CHECKING
+from typing import Dict, List, TYPE_CHECKING, Union

 from docutils import nodes
 from sphinx import addnodes
@@ -53,7 +51,7 @@ class Availability(SphinxDirective):
     optional_arguments = 0
     final_argument_whitespace = True

-    def run(self) -> list[nodes.container]:
+    def run(self) -> List[nodes.container]:
         title = sphinx_gettext("Availability")
         refnode = addnodes.pending_xref(
             title,
@@ -77,7 +75,7 @@ class Availability(SphinxDirective):

         return [cnode]

-    def parse_platforms(self) -> dict[str, str | bool]:
+    def parse_platforms(self) -> Dict[str, Union[str, bool]]:
         """Parse platform information from arguments

         Arguments is a comma-separated string of platforms. A platform may
@@ -96,12 +94,13 @@ class Availability(SphinxDirective):
             platform, _, version = arg.partition(" >= ")
             if platform.startswith("not "):
                 version = False
-                platform = platform.removeprefix("not ")
+                platform = platform[len("not "):]
             elif not version:
                 version = True
             platforms[platform] = version

-        if unknown := set(platforms).difference(KNOWN_PLATFORMS):
+        unknown = set(platforms).difference(KNOWN_PLATFORMS)
+        if unknown:
             logger.warning(
                 "Unknown platform%s or syntax '%s' in '.. availability:: %s', "
                 "see %s:KNOWN_PLATFORMS for a set of known platforms.",
@@ -114,7 +113,7 @@ class Availability(SphinxDirective):
         return platforms


-def setup(app: Sphinx) -> ExtensionMetadata:
+def setup(app):
     app.add_directive("availability", Availability)

     return {
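Aside (not part of the patch): str.removeprefix() only exists on Python
3.9+, which is why the availability.py hunk above falls back to slicing
guarded by startswith(). The same fallback written as a reusable,
hypothetical helper:

    def removeprefix(text, prefix):
        """Backport of str.removeprefix() for Python < 3.9."""
        if text.startswith(prefix):
            return text[len(prefix):]
        return text

    assert removeprefix("not linux", "not ") == "linux"
    assert removeprefix("linux", "not ") == "linux"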
--- a/Doc/tools/extensions/c_annotations.py
+++ b/Doc/tools/extensions/c_annotations.py
@@ -9,22 +9,18 @@ Configuration:
 * Set ``stable_abi_file`` to the path to stable ABI list.
 """

-from __future__ import annotations
-
 import csv
 import dataclasses
 from pathlib import Path
-from typing import TYPE_CHECKING
+from typing import Any, Dict, List, TYPE_CHECKING, Union

 from docutils import nodes
 from docutils.statemachine import StringList
-from sphinx import addnodes
+from sphinx import addnodes, version_info
 from sphinx.locale import _ as sphinx_gettext
 from sphinx.util.docutils import SphinxDirective

-if TYPE_CHECKING:
-    from sphinx.application import Sphinx
-    from sphinx.util.typing import ExtensionMetadata
+from sphinx.application import Sphinx

 ROLE_TO_OBJECT_TYPE = {
     "func": "function",
@@ -35,20 +31,20 @@ ROLE_TO_OBJECT_TYPE = {
 }


-@dataclasses.dataclass(slots=True)
+@dataclasses.dataclass()
 class RefCountEntry:
     # Name of the function.
     name: str
     # List of (argument name, type, refcount effect) tuples.
     # (Currently not used. If it was, a dataclass might work better.)
-    args: list = dataclasses.field(default_factory=list)
+    args: List = dataclasses.field(default_factory=list)
     # Return type of the function.
     result_type: str = ""
     # Reference count effect for the return value.
-    result_refs: int | None = None
+    result_refs: Union[int, None] = None


-@dataclasses.dataclass(frozen=True, slots=True)
+@dataclasses.dataclass(frozen=True)
 class StableABIEntry:
     # Role of the object.
     # Source: Each [item_kind] in stable_abi.toml is mapped to a C Domain role.
@@ -67,7 +63,7 @@ class StableABIEntry:
     struct_abi_kind: str


-def read_refcount_data(refcount_filename: Path) -> dict[str, RefCountEntry]:
+def read_refcount_data(refcount_filename: Path) -> Dict[str, RefCountEntry]:
     refcount_data = {}
     refcounts = refcount_filename.read_text(encoding="utf8")
     for line in refcounts.splitlines():
@@ -103,7 +99,7 @@ def read_refcount_data(refcount_filename
     return refcount_data


-def read_stable_abi_data(stable_abi_file: Path) -> dict[str, StableABIEntry]:
+def read_stable_abi_data(stable_abi_file: Path) -> Dict[str, StableABIEntry]:
     stable_abi_data = {}
     with open(stable_abi_file, encoding="utf8") as fp:
         for record in csv.DictReader(fp):
@@ -127,11 +123,14 @@ def add_annotations(app: Sphinx, doctree
             continue
         if not par[0].get("ids", None):
             continue
-        name = par[0]["ids"][0].removeprefix("c.")
+        name = par[0]["ids"][0]
+        if name.startswith("c."):
+            name = name[len("c."):]
         objtype = par["objtype"]

         # Stable ABI annotation.
-        if record := stable_abi_data.get(name):
+        record = stable_abi_data.get(name)
+        if record:
             if ROLE_TO_OBJECT_TYPE[record.role] != objtype:
                 msg = (
                     f"Object type mismatch in limited API annotation for {name}: "
@@ -238,7 +237,7 @@ def _unstable_api_annotation() -> nodes.
     )


-def _return_value_annotation(result_refs: int | None) -> nodes.emphasis:
+def _return_value_annotation(result_refs: Union[int, None]) -> nodes.emphasis:
     classes = ["refcount"]
     if result_refs is None:
         rc = sphinx_gettext("Return value: Always NULL.")
@@ -258,7 +257,7 @@ class LimitedAPIList(SphinxDirective):
     optional_arguments = 0
     final_argument_whitespace = True

-    def run(self) -> list[nodes.Node]:
+    def run(self) -> List[nodes.Node]:
         state = self.env.domaindata["c_annotations"]
         content = [
             f"* :c:{record.role}:`{record.name}`"
@@ -281,13 +280,23 @@ def init_annotations(app: Sphinx) -> Non
     )


-def setup(app: Sphinx) -> ExtensionMetadata:
+def setup(app: Sphinx) -> Any:
     app.add_config_value("refcount_file", "", "env", types={str})
     app.add_config_value("stable_abi_file", "", "env", types={str})
     app.add_directive("limited-api-list", LimitedAPIList)
     app.connect("builder-inited", init_annotations)
     app.connect("doctree-read", add_annotations)

+    if version_info[:2] < (7, 2):
+        from docutils.parsers.rst import directives
+        from sphinx.domains.c import CObject
+
+        # monkey-patch C object...
+        CObject.option_spec.update({
+            "no-index-entry": directives.flag,
+            "no-contents-entry": directives.flag,
+        })
+
     return {
         "version": "1.0",
         "parallel_read_safe": True,
--- a/Doc/tools/extensions/changes.py
+++ b/Doc/tools/extensions/changes.py
@@ -1,7 +1,5 @@
 """Support for documenting version of changes, additions, deprecations."""

-from __future__ import annotations
-
 from typing import TYPE_CHECKING

 from sphinx.domains.changeset import (
@@ -25,7 +23,7 @@ def expand_version_arg(argument: str, re


 class PyVersionChange(VersionChange):
-    def run(self) -> list[Node]:
+    def run(self) -> "list[Node]":
         # Replace the 'next' special token with the current development version
         self.arguments[0] = expand_version_arg(
             self.arguments[0], self.config.release
@@ -43,7 +41,7 @@ class DeprecatedRemoved(VersionChange):
         "Deprecated since version %s, removed in version %s"
     )

-    def run(self) -> list[Node]:
+    def run(self) -> "list[Node]":
         # Replace the first two arguments (deprecated version and removed version)
         # with a single tuple of both versions.
         version_deprecated = expand_version_arg(
@@ -73,7 +71,7 @@ class DeprecatedRemoved(VersionChange):
     versionlabel_classes[self.name] = ""


-def setup(app: Sphinx) -> ExtensionMetadata:
+def setup(app: "Sphinx") -> "ExtensionMetadata":
     # Override Sphinx's directives with support for 'next'
     app.add_directive("versionadded", PyVersionChange, override=True)
     app.add_directive("versionchanged", PyVersionChange, override=True)
--- a/Doc/tools/extensions/glossary_search.py
+++ b/Doc/tools/extensions/glossary_search.py
@@ -1,18 +1,14 @@
 """Feature search results for glossary items prominently."""

-from __future__ import annotations
-
 import json
 from pathlib import Path
-from typing import TYPE_CHECKING
+from typing import Any, TYPE_CHECKING

 from docutils import nodes
 from sphinx.addnodes import glossary
 from sphinx.util import logging

-if TYPE_CHECKING:
-    from sphinx.application import Sphinx
-    from sphinx.util.typing import ExtensionMetadata
+from sphinx.application import Sphinx

 logger = logging.getLogger(__name__)

@@ -60,7 +56,7 @@ def write_glossary_json(app: Sphinx, _ex
     dest.write_text(json.dumps(app.env.glossary_terms), encoding='utf-8')


-def setup(app: Sphinx) -> ExtensionMetadata:
+def setup(app: Sphinx) -> Any:
     app.connect('doctree-resolved', process_glossary_nodes)
     app.connect('build-finished', write_glossary_json)

--- a/Doc/tools/extensions/misc_news.py
+++ b/Doc/tools/extensions/misc_news.py
@@ -1,7 +1,5 @@
 """Support for including Misc/NEWS."""

-from __future__ import annotations
-
 import re
 from pathlib import Path
 from typing import TYPE_CHECKING
@@ -24,13 +22,13 @@ Python News
 +++++++++++
 """

-bpo_issue_re: Final[re.Pattern[str]] = re.compile(
+bpo_issue_re: "Final[re.Pattern[str]]" = re.compile(
     "(?:issue #|bpo-)([0-9]+)", re.ASCII
 )
-gh_issue_re: Final[re.Pattern[str]] = re.compile(
+gh_issue_re: "Final[re.Pattern[str]]" = re.compile(
     "gh-(?:issue-)?([0-9]+)", re.ASCII | re.IGNORECASE
 )
-whatsnew_re: Final[re.Pattern[str]] = re.compile(
+whatsnew_re: "Final[re.Pattern[str]]" = re.compile(
     r"^what's new in (.*?)\??$", re.ASCII | re.IGNORECASE | re.MULTILINE
 )

@@ -42,7 +40,7 @@ class MiscNews(SphinxDirective):
     final_argument_whitespace = False
     option_spec = {}

-    def run(self) -> list[Node]:
+    def run(self) -> "list[Node]":
         # Get content of NEWS file
         source, _ = self.get_source_info()
         news_file = Path(source).resolve().parent / self.arguments[0]
@@ -54,7 +52,7 @@ class MiscNews(SphinxDirective):
             return [nodes.strong(text, text)]

         # remove first 3 lines as they are the main heading
-        news_text = news_text.removeprefix(BLURB_HEADER)
+        news_text = news_text[len(BLURB_HEADER):] if news_text.startswith(BLURB_HEADER) else news_text

         news_text = bpo_issue_re.sub(r":issue:`\1`", news_text)
         # Fallback handling for GitHub issues
@@ -65,7 +63,7 @@ class MiscNews(SphinxDirective):
             return []


-def setup(app: Sphinx) -> ExtensionMetadata:
+def setup(app: "Sphinx") -> "ExtensionMetadata":
     app.add_directive("miscnews", MiscNews)

     return {
--- a/Doc/tools/extensions/patchlevel.py
+++ b/Doc/tools/extensions/patchlevel.py
@@ -3,7 +3,7 @@
 import re
 import sys
 from pathlib import Path
-from typing import Literal, NamedTuple
+from typing import NamedTuple, Tuple

 CPYTHON_ROOT = Path(
     __file__,  # cpython/Doc/tools/extensions/patchlevel.py
@@ -26,7 +26,7 @@ class version_info(NamedTuple):  # noqa:
     major: int  #: Major release number
     minor: int  #: Minor release number
     micro: int  #: Patch release number
-    releaselevel: Literal["alpha", "beta", "candidate", "final"]
+    releaselevel: str
     serial: int  #: Serial release number


@@ -37,7 +37,8 @@ def get_header_version_info() -> version
     defines = {}
     patchlevel_h = PATCHLEVEL_H.read_text(encoding="utf-8")
     for line in patchlevel_h.splitlines():
-        if (m := pat.match(line)) is not None:
+        m = pat.match(line)
+        if m is not None:
             name, value = m.groups()
             defines[name] = value

@@ -50,7 +51,7 @@ def get_header_version_info() -> version
     )


-def format_version_info(info: version_info) -> tuple[str, str]:
+def format_version_info(info: version_info) -> Tuple[str, str]:
     version = f"{info.major}.{info.minor}"
     release = f"{info.major}.{info.minor}.{info.micro}"
     if info.releaselevel != "final":
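Aside (not part of the patch): the typing substitutions above all follow
one pattern, since built-in generics such as list[str] and dict[str, X]
need Python 3.9 and the "X | None" union syntax needs 3.10. A short
sketch of the equivalences, runnable on Python 3.6+ (names here are
illustrative only):

    from typing import List, Optional, Tuple

    def rows_new() -> "list[tuple[str, list[str]]]":  # 3.9+ spelling (quoted here)
        return []

    def rows_old() -> List[Tuple[str, List[str]]]:    # 3.6-compatible spelling
        return []

    result_refs_new: "int | None" = None              # 3.10+ union syntax (quoted)
    result_refs_old: Optional[int] = None             # same as Union[int, None]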
@@ -6,7 +6,7 @@
 --- a/Doc/tools/extensions/c_annotations.py
 +++ b/Doc/tools/extensions/c_annotations.py
-@@ -118,7 +118,11 @@ def add_annotations(app: Sphinx, doctree
+@@ -117,7 +117,11 @@ def add_annotations(app: Sphinx, doctree
  state = app.env.domaindata["c_annotations"]
  refcount_data = state["refcount_data"]
  stable_abi_data = state["stable_abi_data"]
@@ -42,8 +42,8 @@
 --- a/Doc/tools/extensions/pyspecific.py
 +++ b/Doc/tools/extensions/pyspecific.py
-@@ -27,7 +27,10 @@ from sphinx.locale import _ as sphinx_ge
-from sphinx.util import logging
+@@ -25,7 +25,10 @@ from sphinx.domains.python import PyFunc
+from sphinx.locale import _ as sphinx_gettext
 from sphinx.util.docutils import SphinxDirective
 from sphinx.writers.text import TextWriter, TextTranslator
 -from sphinx.util.display import status_iterator
@@ -1,7 +1,9 @@
-Index: Python-3.12.3/Lib/test/test_compile.py
-===================================================================
---- Python-3.12.3.orig/Lib/test/test_compile.py
-+++ Python-3.12.3/Lib/test/test_compile.py
+---
+ Lib/test/test_compile.py | 5 +++++
+ 1 file changed, 5 insertions(+)
+
+--- a/Lib/test/test_compile.py
++++ b/Lib/test/test_compile.py
 @@ -14,6 +14,9 @@ from test.support import (script_helper,
  requires_specialization, C_RECURSION_LIMIT)
  from test.support.os_helper import FakePath
@@ -21,7 +21,7 @@
 Create a Python.framework rather than a traditional Unix install. Optional
 --- a/Misc/NEWS
 +++ b/Misc/NEWS
-@@ -13832,7 +13832,7 @@ C API
+@@ -14838,7 +14838,7 @@ C API
 - bpo-40939: Removed documentation for the removed ``PyParser_*`` C API.

 - bpo-43795: The list in :ref:`limited-api-list` now shows the public name
@@ -2,11 +2,9 @@
 Lib/test/test_posix.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

-Index: Python-3.12.2/Lib/test/test_posix.py
-===================================================================
---- Python-3.12.2.orig/Lib/test/test_posix.py
-+++ Python-3.12.2/Lib/test/test_posix.py
-@@ -433,7 +433,7 @@ class PosixTester(unittest.TestCase):
+--- a/Lib/test/test_posix.py
++++ b/Lib/test/test_posix.py
+@@ -437,7 +437,7 @@ class PosixTester(unittest.TestCase):
 def test_posix_fadvise(self):
     fd = os.open(os_helper.TESTFN, os.O_RDONLY)
     try:
@@ -1,3 +1,817 @@
-------------------------------------------------------------------
Wed Feb 5 10:35:26 UTC 2025 - Matej Cepl <mcepl@cepl.eu>

- Update to 3.12.9:
  - Tests
    - gh-127906: Test the limited C API in test_cppext. Patch by
      Victor Stinner.
    - gh-127906: Backport test_cext from the main branch. Patch
      by Victor Stinner.
    - gh-127637: Add tests for the dis command-line
      interface. Patch by Bénédikt Tran.
  - Security
    - gh-105704: When using urllib.parse.urlsplit() and
      urllib.parse.urlparse() host parsing would not reject
      domain names containing square brackets ([ and ]). Square
      brackets are only valid for IPv6 and IPvFuture hosts
      according to RFC 3986 Section 3.2.2. (CVE-2025-0938,
      bsc#1236705)
    - gh-127655: Fixed the
      asyncio.selector_events._SelectorSocketTransport
      transport not pausing writes for the protocol when
      the buffer reaches the high water mark when using
      asyncio.WriteTransport.writelines() (CVE-2024-12254,
      bsc#1234290).
    - gh-126108: Fix a possible NULL pointer dereference in
      PySys_AddWarnOptionUnicode().
    - gh-80222: Fix bug in the folding of quoted strings
      when flattening an email message using a modern email
      policy. Previously when a quoted string was folded so
      that it spanned more than one line, the surrounding
      quotes and internal escapes would be omitted. This could
      theoretically be used to spoof header lines using a
      carefully constructed quoted string if the resulting
      rendered email was transmitted or re-parsed.
    - gh-119511: Fix a potential denial of service in the imaplib
      module. When connecting to a malicious server, it could
      cause an arbitrary amount of memory to be allocated. On
      many systems this is harmless as unused virtual memory is
      only a mapping, but if this hit a virtual address size
      limit it could lead to a MemoryError or other process
      crash. On unusual systems or builds where all allocated
      memory is touched and backed by actual ram or storage
      it could’ve consumed resources doing so until similarly
      crashing.
  - Library
    - gh-129502: Unlikely errors in preparing arguments for
      ctypes callback are now handled in the same way as errors
      raised in the callback or in converting the result of
      the callback – using sys.unraisablehook() instead of
      sys.excepthook() and not setting sys.last_exc and other
      variables.
    - gh-129403: Corrected ValueError message for asyncio.Barrier
      and threading.Barrier.
    - gh-129409: Fix an integer overflow in the csv module when
      writing a data field larger than 2GB.
    - gh-118761: Improve import time of subprocess by lazy
      importing locale and signal. Patch by Taneli Hukkinen.
    - gh-129346: In sqlite3, handle out-of-memory when creating
      user-defined SQL functions.
    - gh-128550: Removed an incorrect optimization relating
      to eager tasks in asyncio.TaskGroup that resulted in
      cancellations being missed.
    - gh-128991: Release the enter frame reference within bdb
      callback
    - gh-128961: Fix a crash when setting state on an exhausted
      array.array iterator.
    - gh-128916: Do not attempt to set SO_REUSEPORT on sockets of
      address families other than AF_INET and AF_INET6, as it is
      meaningless with these address families, and the call will
      fail with Linux kernel 6.12.9 and newer.
    - gh-128679: Fix tracemalloc.stop() race condition. Fix
      tracemalloc to support calling tracemalloc.stop() in
      one thread, while another thread is tracing memory
      allocations. Patch by Victor Stinner.
    - gh-128562: Fix possible conflicts in generated tkinter
      widget names if the widget class name ends with a digit.
    - gh-128552: Fix cyclic garbage introduced
      by asyncio.loop.create_task() and
      asyncio.TaskGroup.create_task() holding a reference to the
      created task if it is eager.
    - gh-128479: Fix asyncio.staggered.staggered_race() leaking
      tasks and issuing an unhandled exception.
    - gh-88834: Unify the instance check for typing.Union and
      types.UnionType: Union now uses the instance checks against
      its parameters instead of the subclass checks.
    - gh-128302: Fix
      xml.dom.xmlbuilder.DOMEntityResolver.resolveEntity(), which
      was broken by the Python 3.0 transition.
    - gh-128302: Allow xml.dom.xmlbuilder.DOMParser.parse()
      to correctly handle xml.dom.xmlbuilder.DOMInputSource
      instances that only have a systemId attribute set.
    - gh-112064: Fix incorrect handling of negative read sizes in
      HTTPResponse.read. Patch by Yury Manushkin.
    - gh-58956: Fixed a frame reference leak in bdb.
    - gh-128131: Completely support random access of uncompressed
      unencrypted read-only zip files obtained by ZipFile.open.
    - gh-127975: Avoid reusing quote types in ast.unparse() if
      not needed.
    - gh-128014: Fix resetting the default window icon by passing
      default='' to the tkinter method wm_iconbitmap().
    - gh-115514: Fix exceptions and incomplete writes after
      asyncio._SelectorTransport is closed before writes are
      completed.
    - gh-41872: Fix quick extraction of module docstrings from
      a file in pydoc. It now supports docstrings with single
      quotes, escape sequences, raw string literals, and other
      Python syntax.
    - gh-126742: Fix support of localized error messages reported
      by dlerror(3) and gdbm_strerror in ctypes and dbm.gnu
      functions respectively. Patch by Bénédikt Tran.
    - gh-127870: Detect recursive calls in ctypes _as_parameter_
      handling. Patch by Victor Stinner.
    - gh-127847: Fix the position when doing interleaved seeks
      and reads in uncompressed, unencrypted zip files returned
      by zipfile.ZipFile.open().
    - gh-127732: The platform module now correctly detects
      Windows Server 2025.
    - gh-93312: Include <sys/pidfd.h> to get os.PIDFD_NONBLOCK
      constant. Patch by Victor Stinner.
    - gh-83662: Add missing __class_getitem__ method to the
      Python implementation of functools.partial(), to make it
      compatible with the C version. This is mainly relevant for
      alternative Python implementations like PyPy and GraalPy,
      because CPython will usually use the C-implementation of
      that function.
    - gh-127586: multiprocessing.pool.Pool now properly restores
      blocked signal handlers of the parent thread when creating
      processes via either spawn or forkserver.
    - gh-98188: Fix an issue in
      email.message.Message.get_payload() where data cannot be
      decoded if the Content Transfer Encoding mechanism contains
      trailing whitespaces or additional junk text. Patch by Hui
      Liu.
    - gh-127257: In ssl, system call failures that OpenSSL
      reports using ERR_LIB_SYS are now raised as OSError.
    - gh-126775: Make linecache.checkcache() thread safe and GC
      re-entrancy safe.
    - gh-58956: Fixed a bug in pdb where sometimes the breakpoint
      won’t trigger if it was set on a function which is already
      in the call stack.
    - gh-123401: The http.cookies module now supports parsing
      obsolete RFC 850 date formats, in accordance with RFC 9110
      requirements. Patch by Nano Zheng.
    - gh-123085: In a bare call to importlib.resources.files(),
      ensure the caller’s frame is properly detected when
      importlib.resources is itself available as a compiled
      module only (no source).
    - gh-122431: readline.append_history_file() now raises a
      ValueError when given a negative value.
  - Documentation
    - gh-125722: Require Sphinx 8.1.3 or later to build the
      Python documentation. Patch by Adam Turner.
    - gh-67206: Document that string.printable is not
      printable in the POSIX sense. In particular,
      string.printable.isprintable() returns False. Patch by
      Bénédikt Tran.
  - Core and Builtins
    - gh-129345: Fix null pointer dereference in syslog.openlog()
      when an audit hook raises an exception.
    - gh-129093: Fix f-strings such as f'{expr=}' sometimes not
      displaying the full expression when the expression contains
      !=.
    - gh-124363: Treat debug expressions in f-string as raw
      strings. Patch by Pablo Galindo
    - gh-128799: Add frame of except* to traceback when it wraps
      a naked exception.
    - gh-128078: Fix a SystemError when using anext() with a
      default tuple value. Patch by Bénédikt Tran.
    - gh-128079: Fix a bug where except* does not properly check
      the return value of an ExceptionGroup’s split() function,
      leading to a crash in some cases. Now when split() returns
      an invalid object, except* raises a TypeError with the
      original raised ExceptionGroup object chained to it.
    - gh-127903: Objects/unicodeobject.c: fix a crash on DEBUG
      builds in _copy_characters when there is nothing to copy.
    - gh-127599: Fix statistics for increments of object
      reference counts (in particular, when a reference count was
      increased by more than 1 in a single operation).
    - gh-111609: Respect end_offset in SyntaxError subclasses.
    - gh-126862: Fix a possible overflow when a class inherits
      from an absurd number of super-classes. Reported by Valery
      Fedorenko. Patch by Bénédikt Tran.
    - gh-117195: Avoid assertion failure for debug builds when
      calling object.__sizeof__(1)
  - C API
    - gh-126554: Fix error handling in ctypes.CDLL objects which
      could result in a crash in rare situations.
    - gh-107249: Implement the Py_UNUSED macro for Windows MSVC
      compiler. Patch by Victor Stinner.
  - Build
    - gh-129539: Don’t redefine EX_OK when the system has the
      sysexits.h header.
    - gh-128472: Skip BOLT optimization of functions using
      computed gotos, fixing errors on build with LLVM 19.
    - gh-123925: Fix building the curses module on platforms with
      libncurses but without libncursesw.
    - gh-128321: Set LIBS instead of LDFLAGS when checking if
      sqlite3 library functions are available. This fixes the
      ordering of linked libraries during checks, which was
      incorrect when using a statically linked libsqlite3.
- Remove upstreamed patches:
  - CVE-2024-12254-unbound-mem-buffering-SelectorSocketTransport.writelines.patch
- Add doc-py38-to-py36.patch to make documentation buildable on
  SLE with older Sphinx.

-------------------------------------------------------------------
Mon Jan 27 09:02:35 UTC 2025 - Daniel Garcia <daniel.garcia@suse.com>

- Configure externally_managed with a bcond
  https://en.opensuse.org/openSUSE:Python:Externally_managed
  bsc#1228165

-------------------------------------------------------------------
Fri Dec 6 20:39:56 UTC 2024 - Matej Cepl <mcepl@cepl.eu>

- Add CVE-2024-12254-unbound-mem-buffering-SelectorSocketTransport.writelines.patch
  preventing exhaustion of memory (gh#python/cpython#127655,
  bsc#1234290, CVE-2024-12254).

-------------------------------------------------------------------
Wed Dec 4 21:47:08 UTC 2024 - Matej Cepl <mcepl@cepl.eu>

- Update to 3.12.8:
  - Tools/Demos
    - gh-126807: Fix extraction warnings in pygettext.py caused
      by mistaking function definitions for function calls.
  - Tests
    - gh-126909: Fix test_os extended attribute tests to work on
      filesystems with 1 KiB xattr size limit.
    - gh-125041: Re-enable skipped tests for zlib on the
      s390x architecture: only skip checks of the compressed
      bytes, which can be different between zlib’s software
      implementation and the hardware-accelerated implementation.
    - gh-124295: Add translation tests to the argparse module.
  - Security
    - gh-126623: Upgrade libexpat to 2.6.4
  - Library
    - gh-127303: Publicly expose EXACT_TOKEN_TYPES in
      token.__all__.
    - gh-123967: Fix faulthandler for trampoline frames. If the
      top-most frame is a trampoline frame, skip it. Patch by
      Victor Stinner.
    - gh-127182: Fix io.StringIO.__setstate__() crash, when None
      was passed as the first value.
    - gh-127217: Fix urllib.request.pathname2url() for paths
      starting with multiple slashes on Posix.
    - gh-127035: Fix shutil.which on Windows. Now it looks at
      direct match if and only if the command ends with a PATHEXT
      extension or X_OK is not in mode. Support extensionless
      files if “.” is in PATHEXT. Support PATHEXT extensions that
      end with a dot.
    - gh-127078: Fix issue where urllib.request.url2pathname()
      failed to discard an extra slash before a UNC drive in the
      URL path on Windows.
    - gh-126766: Fix issue where urllib.request.url2pathname()
      failed to discard any ‘localhost’ authority present in the
      URL.
    - gh-126997: Fix support of STRING and GLOBAL opcodes with
      non-ASCII arguments in pickletools. pickletools.dis()
      now outputs non-ASCII bytes in STRING, BINSTRING and
      SHORT_BINSTRING arguments as escaped (\xXX).
    - gh-126618: Fix the representation of itertools.count
      objects when the count value is sys.maxsize.
    - gh-85168: Fix issue where urllib.request.url2pathname() and
      pathname2url() always used UTF-8 when quoting and unquoting
      file URIs. They now use the filesystem encoding and error
      handler.
    - gh-67877: Fix memory leaks when regular expression matching
      terminates abruptly, either because of a signal or because
      memory allocation fails.
    - gh-126789: Fixed the values of sysconfig.get_config_vars(),
      sysconfig.get_paths(), and their siblings when the site
      initialization happens after sysconfig has built a cache
      for sysconfig.get_config_vars().
    - gh-126188: Update bundled pip to 24.3.1
    - gh-126766: Fix issue where urllib.request.url2pathname()
      failed to discard two leading slashes introducing an empty
      authority section.
    - gh-126727: locale.nl_langinfo(locale.ERA) now returns
      multiple era description segments separated by
      semicolons. Previously it only returned the first segment
      on platforms with Glibc.
    - gh-126699: Allow collections.abc.AsyncIterator to be a base
      for Protocols.
    - gh-104745: Limit starting a patcher (from
      unittest.mock.patch() or unittest.mock.patch.object()) more
      than once without stopping it
    - gh-126595: Fix a crash when instantiating itertools.count
      with an initial count of sys.maxsize on debug builds. Patch
      by Bénédikt Tran.
    - gh-120423: Fix issue where urllib.request.pathname2url()
      mishandled Windows paths with embedded forward slashes.
    - gh-126565: Improve performances of zipfile.Path.open() for
      non-reading modes.
    - gh-126505: Fix bugs in compiling case-insensitive regular
      expressions with character classes containing non-BMP
      characters: upper-case non-BMP character was ignored
      and the ASCII flag was ignored when matching a character
      range whose upper bound is beyond the BMP region.
    - gh-117378: Fixed the multiprocessing "forkserver"
      start method forkserver process to correctly inherit
      the parent’s sys.path during the importing of
      multiprocessing.set_forkserver_preload() modules in the
      same manner as sys.path is configured in workers before
      executing work items.
      This bug caused some forkserver module preloading to silently
      fail to preload. This manifested as a performance degradation
      in child processes when the sys.path was required due to
      additional repeated work in every worker.
      It could also have a side effect of "" remaining in
      sys.path during forkserver preload imports instead of the
      absolute path from os.getcwd() at multiprocessing import time
      used in the worker sys.path.
      The sys.path differences between phases in the child
      process could potentially have caused preload to import incorrect
      things from the wrong location. We are unaware of that actually
      having happened in practice.
    - gh-125679: The multiprocessing.Lock and
      multiprocessing.RLock repr values no longer say “unknown”
      on macOS.
    - gh-126476: Raise calendar.IllegalMonthError (now a subclass
      of IndexError) for calendar.month() when the input month is
      not correct.
    - gh-126489: The Python implementation of pickle no longer
      calls pickle.Pickler.persistent_id() for the result of
      persistent_id().
    - gh-126303: Fix pickling and copying of os.sched_param
      objects.
    - gh-126138: Fix a use-after-free crash on asyncio.Task
      objects whose underlying coroutine yields an object that
      implements an evil __getattribute__(). Patch by Nico
      Posada.
    - gh-126220: Fix crash in cProfile.Profile and
      _lsprof.Profiler when their callbacks were directly called
      with 0 arguments.
    - gh-126212: Fix issue where urllib.request.pathname2url()
      and url2pathname() removed slashes from Windows DOS drive
      paths and URLs.
    - gh-126205: Fix issue where urllib.request.pathname2url()
      generated URLs beginning with four slashes (rather than
      two) when given a Windows UNC path.
    - gh-126105: Fix a crash in ast when the ast.AST._fields
      attribute is deleted.
    - gh-126106: Fixes a possible NULL pointer dereference in
      ssl.
    - gh-126080: Fix a use-after-free crash on asyncio.Task
      objects for which the underlying event loop implements an
      evil __getattribute__(). Reported by Nico-Posada. Patch by
      Bénédikt Tran.
    - gh-126083: Fixed a reference leak in asyncio.Task objects
      when reinitializing the same object with a non-None
      context. Patch by Nico Posada.
    - gh-125984: Fix use-after-free crashes on asyncio.Future
      objects for which the underlying event loop implements an
      evil __getattribute__(). Reported by Nico-Posada. Patch by
      Bénédikt Tran.
    - gh-125969: Fix an out-of-bounds crash when an evil
      asyncio.loop.call_soon() mutates the length of the internal
      callbacks list. Patch by Bénédikt Tran.
    - gh-125966: Fix a use-after-free crash in
      asyncio.Future.remove_done_callback(). Patch by Bénédikt
      Tran.
    - gh-125789: Fix possible crash when mutating list of
      callbacks returned by asyncio.Future._callbacks. It
      now always returns a new copy in C implementation
      _asyncio. Patch by Kumar Aditya.
    - gh-124452: Fix an issue in
      email.policy.EmailPolicy.header_source_parse() and
      email.policy.Compat32.header_source_parse() that introduced
      spurious leading whitespaces into header values when the
      header includes a newline character after the header name
      delimiter (:) and before the value.
    - gh-125884: Fixed the bug for pdb where it can’t set
      breakpoints on functions with certain annotations.
    - gh-125355: Fix several bugs in
      argparse.ArgumentParser.parse_intermixed_args().
      The parser no longer changes temporarily during parsing.
      Default values are not processed twice.
      Required mutually exclusive groups containing positional
      arguments are now supported.
      The missing arguments report now includes the names of
      all required optional and positional arguments.
      Unknown options can be intermixed with positional
      arguments in parse_known_intermixed_args().
    - gh-125682: Reject non-ASCII digits in the Python
      implementation of json.loads() conforming to the JSON
      specification.
    - gh-125660: Reject invalid unicode escapes for Python
      implementation of json.loads().
    - gh-125259: Fix the notes removal logic for errors thrown in
      enum initialization.
    - gh-125519: Improve traceback if importlib.reload() is
      called with an object that is not a module. Patch by Alex
      Waygood.
    - gh-125451: Fix deadlock when
      concurrent.futures.ProcessPoolExecutor shuts down
      concurrently with an error when feeding a job to a worker
      process.
    - gh-125422: Fixed the bug where pdb and bdb can step into
      the bottom caller frame.
    - gh-100141: Fixed the bug where pdb will be stuck in an
      infinite loop when debugging an empty file.
    - gh-53203: Fix time.strptime() for %c, %x and %X formats
      in many locales that use non-ASCII digits, like Persian,
      Burmese, Odia and Shan.
    - gh-125254: Fix a bug where ArgumentError includes the
      incorrect ambiguous option in argparse.
    - gh-61011: Fix inheritance of nested mutually
      exclusive groups from parent parser in
      argparse.ArgumentParser. Previously, all nested mutually
      exclusive groups lost their connection to the group
      containing them and were displayed as belonging directly to
      the parser.
    - gh-52551: Fix encoding issues in time.strftime(), the
      strftime() method of the datetime classes datetime, date
      and time and formatting of these classes. Characters
      not encodable in the current locale are now acceptable
      in the format string. Surrogate pairs and sequence
      of surrogatescape-encoded bytes are no longer
      recombinated. Embedded null character no longer terminates
      the format string.
    - gh-125118: Don’t copy arbitrary values to _Bool in the
      struct module.
    - gh-125069: Fix an issue where providing a pathlib.PurePath
      object as an initializer argument to a second PurePath
      object with a different flavour resulted in arguments to
      the former object’s initializer being joined by the latter
      object’s flavour.
    - gh-124969: Fix locale.nl_langinfo(locale.ALT_DIGITS) on
      platforms with glibc. Now it returns a string consisting of
      up to 100 semicolon-separated symbols (an empty string in
      most locales) on all Posix platforms. Previously it only
      returned the first symbol or an empty string.
    - gh-124958: Fix refcycles in exceptions raised from
      asyncio.TaskGroup and the python implementation of
      asyncio.Future
    - gh-53203: Fix time.strptime() for %c and %x formats in many
      locales: Arabic, Bislama, Breton, Bodo, Kashubian, Chuvash,
      Estonian, French, Irish, Ge’ez, Gurajati, Manx Gaelic,
      Hebrew, Hindi, Chhattisgarhi, Haitian Kreyol, Japanese,
      Kannada, Korean, Marathi, Malay, Norwegian, Nynorsk,
      Punjabi, Rajasthani, Tok Pisin, Yoruba, Yue Chinese,
      Yau/Nungon and Chinese.
    - gh-124917: Allow calling os.path.exists() and
      os.path.lexists() with keyword arguments on Windows. Fixes
      a regression in 3.12.4.
    - gh-124653: Fix detection of the minimal Queue API needed by
      the logging module. Patch by Bénédikt Tran.
    - gh-124858: Fix reference cycles left in tracebacks
      in asyncio.open_connection() when used with
      happy_eyeballs_delay
    - gh-124390: Fixed AssertionError when using
      asyncio.staggered.staggered_race() with
      asyncio.eager_task_factory.
    - gh-124651: Properly quote template strings in venv
      activation scripts.
    - gh-124594: All asyncio REPL prompts run in the same
      context. Contributed by Bartosz Sławecki.
    - gh-120378: Fix a crash related to an integer overflow in
      curses.resizeterm() and curses.resize_term().
    - gh-123884: Fixed bug in itertools.tee() handling of other
      tee inputs (a tee in a tee). The output now has the
      promised n independent new iterators. Formerly, the first
      iterator was identical (not independent) to the input
      iterator. This would sometimes give surprising results.
    - gh-123978: Remove broken time.thread_time() and
      time.thread_time_ns() on NetBSD.
    - gh-124008: Fix possible crash (in debug build), incorrect
      output or returning incorrect value from raw binary write()
      when writing to console on Windows.
    - gh-123370: Fix the canvas not clearing after running
      turtledemo clock.
    - gh-120754: Update unbounded read calls in zipfile to
      specify an explicit size putting a limit on how much data
      they may read. This also updates handling around ZIP max
      comment size to match the standard instead of reading
      comments that are one byte too long.
    - gh-70764: Fixed an issue where inspect.getclosurevars()
      would incorrectly classify an attribute name as a global
      variable when the name exists both as an attribute name and
      a global variable.
    - gh-119826: Always return an absolute path for
      os.path.abspath() on Windows.
    - gh-117766: Always use str() to print choices in argparse.
    - gh-101955: Fix SystemError when match regular expression
      pattern containing some combination of possessive
      quantifier, alternative and capture group.
    - gh-88110: Fixed multiprocessing.Process reporting a
      .exitcode of 1 even on success when using the "fork" start
      method while using a concurrent.futures.ThreadPoolExecutor.
    - gh-71936: Fix a race condition in
      multiprocessing.pool.Pool.
    - bpo-46128: Strip unittest.IsolatedAsyncioTestCase stack
      frames from reported stacktraces.
    - bpo-14074: Fix argparse metavar processing to allow
      positional arguments to have a tuple metavar.
  - IDLE
    - gh-122392: Increase currently inadequate vertical spacing
      for the IDLE browsers (path, module, and stack) on
      high-resolution monitors.
  - Documentation
    - gh-125277: Require Sphinx 7.2.6 or later to build the
      Python documentation. Patch by Adam Turner.
    - gh-125018: The importlib.metadata documentation now
      includes semantic cross-reference targets for the
      significant documented APIs. This means intersphinx
      references like importlib.metadata.version() will now work
      as expected.
    - gh-121277: Writers of CPython’s documentation can now use
      next as the version for the versionchanged, versionadded,
      deprecated directives.
    - gh-60712: Include the object type in the lists of
      documented types. Change by Furkan Onder and Martin Panter.
  - Core and Builtins
    - gh-113841: Fix possible undefined behavior division by zero
      in complex’s _Py_c_pow().
    - gh-126341: Now ValueError is raised instead of SystemError
      when trying to iterate over a released memoryview object.
    - gh-126066: Fix importlib to not write an incomplete
      .pyc files when a ulimit or some other operating system
      mechanism is preventing the write to go through fully.
    - gh-126139: Provide better error location when attempting to
      use a future statement with an unknown future feature.
    - gh-125008: Fix tokenize.untokenize() producing invalid
      syntax for double braces preceded by certain escape
      characters.
    - gh-123378: Fix a crash in the __str__() method of
      UnicodeError objects when the UnicodeError.start and
      UnicodeError.end values are invalid or out-of-range. Patch
      by Bénédikt Tran.
    - gh-116510: Fix a crash caused by immortal interned strings
      being shared between sub-interpreters that use basic
      single-phase init. In that case, the string can be used
      by an interpreter that outlives the interpreter that
      created and interned it. For interpreters that share
      obmalloc state, also share the interned dict with the main
      interpreter.
    - gh-118950: Fix bug where SSLProtocol.connection_lost wasn’t
      getting called when OSError was thrown on writing to
      socket.
    - gh-113570: Fixed a bug in reprlib.repr where it incorrectly
      called the repr method on shadowed Python built-in types.
    - gh-109746: If _thread.start_new_thread() fails to start a
      new thread, it deletes its state from interpreter and thus
      avoids its repeated cleanup on finalization.
  - C API
    - gh-113601: Removed debug build assertions related to
      interning strings, which were falsely triggered by stable
      ABI extensions.
  - Build
    - gh-89640: Hard-code float word ordering as little endian on
      WASM.
    - gh-89640: Improve detection of float word ordering on Linux
      when link-time optimizations are enabled.
- Remove upstreamed patches:
  - CVE-2024-9287-venv_path_unquoted.patch

-------------------------------------------------------------------
Thu Nov 28 22:20:25 UTC 2024 - Matej Cepl <mcepl@cepl.eu>

- Update doc-py38-to-py36.patch to include str.removeprefix
  replacement.

-------------------------------------------------------------------
Thu Nov 14 07:06:20 UTC 2024 - Matej Cepl <mcepl@cepl.eu>

- Remove -IVendor/ from python-config boo#1231795

-------------------------------------------------------------------
Thu Oct 24 16:09:00 UTC 2024 - Matej Cepl <mcepl@cepl.eu>

- Add CVE-2024-9287-venv_path_unquoted.patch to properly quote
  path names provided when creating a virtual environment
  (bsc#1232241, CVE-2024-9287)

-------------------------------------------------------------------
Thu Oct 24 16:09:00 UTC 2024 - Matej Cepl <mcepl@cepl.eu>

- Add CVE-2024-9287-venv_path_unquoted.patch to properly quote
  path names provided when creating a virtual environment
  (bsc#1232241, CVE-2024-9287)
- Update doc-py38-to-py36.patch to include str.removeprefix
  replacement.

-------------------------------------------------------------------
|
||||||
|
Tue Oct 1 15:32:06 UTC 2024 - Matej Cepl <mcepl@cepl.eu>
|
||||||
|
|
||||||
|
- Update to 3.12.7:
|
||||||
|
- Tests
|
||||||
|
- gh-124378: Updated test_ttk to pass with Tcl/Tk 8.6.15.
|
||||||
|
- Security
|
||||||
|
- gh-122792: Changed IPv4-mapped ipaddress.IPv6Address to
|
||||||
|
consistently use the mapped IPv4 address value for deciding
|
||||||
|
properties. Properties which have their behavior fixed are
|
||||||
|
is_multicast, is_reserved, is_link_local, is_global, and
|
||||||
|
is_unspecified.
|
||||||
|
- Library
|
||||||
|
- gh-116850: Fix argparse for namespaces with not directly
|
||||||
|
writable dict (e.g. classes).
|
||||||
|
- gh-58573: Fix conflicts between abbreviated long options in
|
||||||
|
the parent parser and subparsers in argparse.
|
||||||
|
- gh-61181: Fix support of choices with string value in
|
||||||
|
argparse. Substrings of the specified string no longer
|
||||||
|
considered valid values.
|
||||||
|
- gh-80259: Fix argparse support of positional arguments with
|
||||||
|
nargs='?', default=argparse.SUPPRESS and specified type.
|
||||||
|
- gh-124498: Fix typing.TypeAliasType not to be generic, when
|
||||||
|
type_params is an empty tuple.
|
||||||
|
- gh-124345: argparse vim supports abbreviated single-dash
|
||||||
|
long options separated by = from its value.
|
||||||
|
- gh-104860: Fix disallowing abbreviation of single-dash long
|
||||||
|
options in argparse with allow_abbrev=False.
|
||||||
|
- gh-63143: Fix parsing mutually exclusive arguments in
|
||||||
|
argparse. Arguments with the value identical to the default
|
||||||
|
value (e.g. booleans, small integers, empty or 1-character
|
||||||
|
strings) are no longer considered “not present”.
|
||||||
|
- gh-72795: Positional arguments with nargs equal to '*' or
|
||||||
|
argparse.REMAINDER are no longer required. This allows to
|
||||||
|
use positional argument with nargs='*' and without default
|
||||||
|
in mutually exclusive group and improves error message
|
||||||
|
about required arguments.
|
||||||
|
- gh-59317: Fix parsing positional argument with nargs equal
|
||||||
|
to '?' or '*' if it is preceded by an option and another
|
||||||
|
positional argument.
|
||||||
|
- gh-53780: argparse now ignores the first "--" (double dash)
|
||||||
|
  between an option and command.
- gh-124217: Add RFC 9637 reserved IPv6 block 3fff::/20 in the
  ipaddress module.
- gh-124248: Fixed potential crash when using struct to process
  zero-width ‘Pascal string’ fields (0p).
- gh-81691: Fix handling of multiple "--" (double dashes) in
  argparse. Only the first one is now removed; all subsequent
  ones are taken literally (a usage sketch follows this
  changelog entry).
- gh-87041: Fix a bug in argparse where lengthy subparser
  argument help is incorrectly indented.
- gh-124171: Add workaround for broken fmod() implementations
  on Windows that lose the zero sign (e.g. fmod(-10, 1) returns
  0.0). Patch by Sergey B Kirpichev.
- gh-123934: Fix unittest.mock.MagicMock resetting magic
  methods' return values after .reset_mock(return_value=True)
  was called.
- gh-123935: Fix parent slots detection for dataclasses that
  inherit from classes with __dictoffset__.
- gh-123892: Add "_wmi" to sys.stdlib_module_names. Patch by
  Victor Stinner.
- gh-116810: Resolve a memory leak introduced in CPython 3.10’s
  ssl when the ssl.SSLSocket.session property was accessed.
  Speeds up read and write access to said property by no longer
  unnecessarily cloning session objects via serialization.
- gh-121735: When working with zip archives, importlib.resources
  now properly honors module-adjacent references (e.g.
  files(pkg.mod) and not just files(pkg)).
- gh-119004: Fix a crash in OrderedDict.__eq__ when operands
  are mutated during the check. Patch by Bénédikt Tran.
- bpo-44864: Do not translate user-provided strings in
  argparse.ArgumentParser.
- IDLE
- gh-112938: Fix uninterruptible hang when Shell gets rapid
  continuous output.
- Core and Builtins
- gh-116510: Fix a bug that can cause a crash when
  sub-interpreters use “basic” single-phase extension modules.
  Shared objects could refer to PyGC_Head nodes that had been
  freed as part of interpreter cleanup.
- gh-124188: Fix reading and decoding a line from the source
  file with non-UTF-8 encoding for syntax errors raised in the
  compiler.
- gh-77894: Fix possible crash in the garbage collector when it
  tries to break a reference loop containing a memoryview
  object. Now a memoryview object can only be cleared if there
  are no buffers that refer to it.
- gh-98442: Fix too-wide source locations of the cleanup
  instructions of a with statement.
- gh-113993: Strings interned with sys.intern() are again
  garbage-collected when no longer used, as per the
  documentation. Strings interned with the C function
  PyUnicode_InternInPlace() are still immortal. Internals of
  the string interning mechanism have been changed. This may
  affect performance and identities of str objects.
- C API
- gh-113993: PyUnicode_InternInPlace() no longer prevents its
  argument from being garbage collected.
- Several functions that take char * are now documented as
  possibly preventing string objects from being garbage
  collected; refer to their documentation for details:
  PyUnicode_InternFromString(), PyDict_SetItemString(),
  PyObject_SetAttrString(), PyObject_DelAttrString(),
  PyUnicode_InternFromString(), and PyModule_Add* convenience
  functions.
- Build
- gh-124487: Windows builds now use Windows 8.1 as their API
  baseline (installation already required Windows 8.1).
- gh-123917: Fix the check for the crypt() function in the
  configure script. Patch by Paul Smith and Victor Stinner.
- Change previous removal of *.pyc files to use rm -f instead
  of /bin/true (bsc#1230906).

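A minimal sketch of the argparse behavior referenced in the gh-81691
entry above; the parser and arguments are invented for illustration:

    import argparse

    # A throwaway parser: one positional list that collects everything.
    parser = argparse.ArgumentParser()
    parser.add_argument("args", nargs="*")

    # Only the first "--" is consumed as the option/positional
    # separator; later "--" tokens should now be kept literally.
    print(parser.parse_args(["--", "-x", "--", "y"]).args)
    # expected with the fix: ['-x', '--', 'y']
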
-------------------------------------------------------------------
Mon Sep 30 09:42:13 UTC 2024 - Bernhard Wiedemann <bwiedemann@suse.com>

- Drop .pyc files from docdir for reproducible builds

-------------------------------------------------------------------
Fri Sep 13 17:09:37 UTC 2024 - Matej Cepl <mcepl@cepl.eu>

- Add doc-py38-to-py36.patch to make building the documentation
  compatible with Python 3.6, which runs Sphinx on SLE.

-------------------------------------------------------------------
Sat Sep 7 21:49:34 UTC 2024 - Matej Cepl <mcepl@cepl.eu>

- Update to 3.12.6:
- Tests
- gh-101525: Skip test_gdb if the binary is relocated by BOLT.
  Patch by Donghee Na.
- Security
- gh-123678: Upgrade libexpat to 2.6.3
- gh-121285: Remove backtracking from tarfile header parsing
  for hdrcharset, PAX, and GNU sparse headers (bsc#1230227,
  CVE-2024-6232).
- Library
- gh-123270: Applied a more surgical fix for malformed
  payloads in zipfile.Path causing infinite loops (gh-122905)
  without breaking contents using legitimate characters
  (bsc#1229704, CVE-2024-8088).
- gh-123213: xml.etree.ElementTree.Element.extend() and
  Element assignment no longer hide the internal exception if
  an erroneous generator is passed. Patch by Bar Harel.
- gh-85110: Preserve relative path in URL without netloc in
  urllib.parse.urlunsplit() and urllib.parse.urlunparse().
- gh-123067: Fix quadratic complexity in parsing "-quoted
  cookie values with backslashes by http.cookies (bsc#1229596,
  CVE-2024-7592).
- gh-122903: zipfile.Path.glob now correctly matches
  directories instead of silently omitting them.
- gh-122905: zipfile.Path objects now sanitize names from the
  zipfile.
- gh-122695: Fixed double-free when using gc.get_referents()
  with a freed asyncio.Future iterator.
- gh-116263: logging.handlers.RotatingFileHandler no longer
  rolls over empty log files.
- gh-118814: Fix the typing.TypeVar constructor when name is
  passed by keyword.
- gh-122478: Remove internal frames from tracebacks shown in
  code.InteractiveInterpreter with non-default
  sys.excepthook(). Save correct tracebacks in
  sys.last_traceback and update __traceback__ attribute of
  sys.last_value and sys.last_exc.
- gh-113785: csv now correctly parses numeric fields (when
  used with csv.QUOTE_NONNUMERIC) which start with an escape
  character (a sketch follows this changelog entry).
- gh-112182: asyncio.futures.Future.set_exception() now
  transforms StopIteration into RuntimeError instead of
  hanging or other misbehavior. Patch contributed by Jamie
  Phan.
- gh-108172: webbrowser honors the OS preferred browser on
  Linux when its desktop entry name contains the text of a
  known browser name.
- gh-102988: email.utils.getaddresses() and
  email.utils.parseaddr() now return ('', '') 2-tuples in more
  situations where invalid email addresses are encountered
  instead of potentially inaccurate values. Add optional
  strict parameter to these two functions: use strict=False to
  get the old behavior, which accepts malformed inputs.
  getattr(email.utils, 'supports_strict_parsing', False) can
  be used to check whether the strict parameter is available.
  Patch by Thomas Dwyer and Victor Stinner to improve the
  CVE-2023-27043 fix (a usage sketch follows this changelog
  entry).
- gh-99437: runpy.run_path() now decodes path-like objects,
  making sure __file__ and sys.argv[0] of the module being run
  are always strings.
- IDLE
- gh-120083: Add explicit black IDLE Hovertip foreground color
  needed for recent macOS. Fixes Sonoma showing unreadable
  white on pale yellow. Patch by John Riggles.
- Core and Builtins
- gh-123321: Prevent Parser/myreadline race condition from
  segfaulting on multi-threaded use. Patch by Bar Harel and
  Amit Wienner.
- gh-122982: Extend the deprecation period for bool inversion
  (~) by two years.
- gh-123229: Fix valgrind warning by initializing the f-string
  buffers to 0 in the tokenizer. Patch by Pablo Galindo.
- gh-123142: Fix too-wide source location in exception
  tracebacks coming from broken iterables in comprehensions.
- gh-123048: Fix a bug where pattern matching code could emit
  a JUMP_FORWARD with no source location.
- gh-123083: Fix a potential use-after-free in
  STORE_ATTR_WITH_HINT.
- gh-122527: Fix a crash that occurred when a PyStructSequence
  was deallocated after its type’s dictionary was cleared by
  the GC. The type’s tp_basicsize now accounts for
  non-sequence fields that aren’t included in the Py_SIZE of
  the sequence.
- gh-93691: Fix source locations of instructions generated for
  with statements.
- Build
- gh-123297: Propagate the value of LDFLAGS to LDCXXSHARED in
  sysconfig. Patch by Pablo Galindo.
- Remove upstreamed patches:
- CVE-2023-27043-email-parsing-errors.patch
- CVE-2024-8088-inf-loop-zipfile_Path.patch
- CVE-2023-6597-TempDir-cleaning-symlink.patch
- gh120226-fix-sendfile-test-kernel-610.patch

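A minimal sketch of the strict address parsing noted in the gh-102988
entry above; the header value below is made up for illustration:

    import email.utils

    # Feature check mentioned in the entry above: the attribute only
    # exists on builds that carry the gh-102988 change.
    if getattr(email.utils, "supports_strict_parsing", False):
        malformed = "alice@example.org <bob@example.org>"
        # The strict parser (the default) should reject this and signal
        # the failure as ('', '') instead of guessing an address.
        print(email.utils.parseaddr(malformed))
        # The pre-3.12.6 lenient behavior remains available on request.
        print(email.utils.parseaddr(malformed, strict=False))
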
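Likewise, a small sketch of the csv fix in the gh-113785 entry above,
using an invented row whose numeric field starts with the escape
character:

    import csv
    import io

    # With QUOTE_NONNUMERIC the reader converts unquoted fields to
    # float; the second field begins with the escape character, which
    # is the case gh-113785 addresses.
    row = next(csv.reader(io.StringIO('"spam",\\1.25\r\n'),
                          quoting=csv.QUOTE_NONNUMERIC,
                          escapechar="\\"))
    print(row)  # expected with the fix: ['spam', 1.25]
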
-------------------------------------------------------------------
Mon Sep 2 09:44:26 UTC 2024 - Matej Cepl <mcepl@cepl.eu>

- Add gh120226-fix-sendfile-test-kernel-610.patch to avoid
  failing test_sendfile_close_peer_in_the_middle_of_receiving
  tests on Linux >= 6.10 (GH-120227).

-------------------------------------------------------------------
Wed Aug 28 16:54:34 UTC 2024 - Matej Cepl <mcepl@cepl.eu>

@@ -1,7 +1,7 @@
 #
 # spec file for package python312
 #
-# Copyright (c) 2024 SUSE LLC
+# Copyright (c) 2025 SUSE LLC
 #
 # All modifications and additions to the file contributed by third parties
 # remain the property of their copyright owners, unless otherwise agreed
@@ -42,6 +42,14 @@
 %bcond_with profileopt
 %endif
 
+# Only for Tumbleweed
+# https://en.opensuse.org/openSUSE:Python:Externally_managed
+%if 0%{?suse_version} > 1600
+%bcond_without externally_managed
+%else
+%bcond_with externally_managed
+%endif
+
 %define python_pkg_name python312
 %if "%{python_pkg_name}" == "%{primary_python}"
 %define primary_interpreter 1
@@ -110,16 +118,17 @@
 # _md5.cpython-38m-x86_64-linux-gnu.so
 %define dynlib() %{sitedir}/lib-dynload/%{1}.cpython-%{abi_tag}-%{archname}-%{_os}%{?_gnu}%{?armsuffix}.so
 Name: %{python_pkg_name}%{psuffix}
-Version: 3.12.5
+Version: 3.12.9
 Release: 0
 Summary: Python 3 Interpreter
 License: Python-2.0
 URL: https://www.python.org/
 Source0: https://www.python.org/ftp/python/%{folderversion}/%{tarname}.tar.xz
 Source1: https://www.python.org/ftp/python/%{folderversion}/%{tarname}.tar.xz.asc
-Source2: baselibs.conf
-Source3: README.SUSE
-Source4: externally_managed.in
+Source2: https://www.python.org/ftp/python/%{folderversion}/%{tarname}.tar.xz.sigstore
+Source3: baselibs.conf
+Source4: README.SUSE
+Source5: externally_managed.in
 Source7: macros.python3
 Source8: import_failed.py
 Source9: import_failed.map
@@ -168,13 +177,6 @@ Patch34: skip-test_pyobject_freed_is_freed.patch
 # PATCH-FIX-SLE fix_configure_rst.patch bpo#43774 mcepl@suse.com
 # remove duplicate link targets and make documentation with old Sphinx in SLE
 Patch35: fix_configure_rst.patch
-# PATCH-FIX-UPSTREAM CVE-2023-27043-email-parsing-errors.patch bsc#1210638 mcepl@suse.com
-# Detect email address parsing errors and return empty tuple to
-# indicate the parsing error (old API)
-Patch36: CVE-2023-27043-email-parsing-errors.patch
-# PATCH-FIX-UPSTREAM CVE-2023-6597-TempDir-cleaning-symlink.patch bsc#1219666 mcepl@suse.com
-# tempfile.TemporaryDirectory: fix symlink bug in cleanup (from gh#python/cpython!99930)
-Patch38: CVE-2023-6597-TempDir-cleaning-symlink.patch
 # PATCH-FIX-OPENSUSE CVE-2023-52425-libexpat-2.6.0-backport-15.6.patch
 # This problem on libexpat is patched on 15.6 without version
 # update, this patch changes the tests to match the libexpat provided
@@ -186,9 +188,9 @@ Patch40: fix-test-recursion-limit-15.6.patch
 # PATCH-FIX-SLE docs-docutils_014-Sphinx_420.patch bsc#[0-9]+ mcepl@suse.com
 # related to gh#python/cpython#119317
 Patch41: docs-docutils_014-Sphinx_420.patch
-# PATCH-FIX-UPSTREAM CVE-2024-8088-inf-loop-zipfile_Path.patch bsc#1229704 mcepl@suse.com
-# avoid denial of service in zipfile
-Patch42: CVE-2024-8088-inf-loop-zipfile_Path.patch
+# PATCH-FIX-SLE doc-py38-to-py36.patch mcepl@suse.com
+# Make documentation extensions working with Python 3.6
+Patch44: doc-py38-to-py36.patch
 BuildRequires: autoconf-archive
 BuildRequires: automake
 BuildRequires: fdupes
@@ -219,6 +221,9 @@ BuildRequires: mpdecimal-devel
 BuildRequires: python3-Sphinx >= 4.0.0
 %if 0%{?suse_version} >= 1500
 BuildRequires: python3-python-docs-theme >= 2022.1
+%if 0%{?suse_version} < 1599
+BuildRequires: python3-dataclasses
+%endif
 %endif
 %endif
 %if %{with general}
@@ -442,8 +447,7 @@ This package contains libpython3.2 shared library for embedding in
 other applications.
 
 %prep
-%setup -q -n %{tarname}
-%autopatch -p1
+%autosetup -p1 -n %{tarname}
 
 # Fix devhelp doc build gh#python/cpython#120150
 echo "master_doc = 'contents'" >> Doc/conf.py
@@ -480,7 +484,7 @@ rm Lib/site-packages/README.txt
 tar xvf %{SOURCE21}
 
 # Don't fail on warnings when building documentation
-# sed -i -e '/^SPHINXERRORHANDLING/s/-W//' Doc/Makefile
+sed -i -e '/^SPHINXERRORHANDLING/s/-W//' Doc/Makefile
 
 %build
 %if %{with doc}
@@ -729,7 +733,7 @@ rm %{buildroot}%{_libdir}/libpython3.so
 rm %{buildroot}%{_libdir}/pkgconfig/{python3,python3-embed}.pc
 %endif
 
-%if %{suse_version} > 1550
+%if %{with externally_managed}
 # PEP-0668 mark this as a distro maintained python
 sed -e 's,__PYTHONPREFIX__,%{python_pkg_name},' -e 's,__PYTHON__,python%{python_version},' < %{SOURCE4} > %{buildroot}%{sitedir}/EXTERNALLY-MANAGED
 %endif
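For context on the PEP 668 marker handled in the hunk above: installers
look for an EXTERNALLY-MANAGED file next to the standard library. A
minimal check, assuming the default sysconfig layout of this build:

    import pathlib
    import sysconfig

    # PEP 668: the marker sits in the stdlib directory
    # (%{sitedir}/EXTERNALLY-MANAGED in this spec); pip refuses to
    # install into the system environment while it is present.
    marker = pathlib.Path(sysconfig.get_path("stdlib")) / "EXTERNALLY-MANAGED"
    print("externally managed:", marker.is_file())
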
@@ -751,7 +755,7 @@ rm %{buildroot}%{_bindir}/2to3
 # documentation
 export PDOCS=%{buildroot}%{_docdir}/%{name}
 install -d -m 755 $PDOCS
-install -c -m 644 %{SOURCE3} $PDOCS/
+install -c -m 644 %{SOURCE4} $PDOCS/
 install -c -m 644 README.rst $PDOCS/
 
 # tools
@@ -770,6 +774,9 @@ install -m 755 -D Tools/gdb/libpython.py %{buildroot}%{_datadir}/gdb/auto-load/%
 # install devel files to /config
 #cp Makefile Makefile.pre.in Makefile.pre $RPM_BUILD_ROOT%{sitedir}/config-%{python_abi}/
 
+# Remove -IVendor/ from python-config boo#1231795
+sed -i 's/-IVendor\///' %{buildroot}%{_bindir}/python%{python_abi}-config
+
 # RPM macros
 %if %{primary_interpreter}
 mkdir -p %{buildroot}%{_rpmconfigdir}/macros.d/
@@ -801,6 +808,11 @@ LD_LIBRARY_PATH=. ./python -O -c "from py_compile import compile; compile('$FAIL
 echo %{sitedir}/_import_failed > %{buildroot}/%{sitedir}/site-packages/zzzz-import-failed-hooks.pth
 %endif
 
+# For the purposes of reproducibility, it is necessary to eliminate any *.pyc files inside documentation dirs
+if [ -d %{buildroot}%{_defaultdocdir} ] ; then
+find %{buildroot}%{_defaultdocdir} -type f -name \*.pyc -ls -exec rm -vf '{}' \;
+fi
+
 %if %{with general}
 %files -n %{python_pkg_name}-tk
 %{sitedir}/tkinter
@@ -919,7 +931,7 @@ echo %{sitedir}/_import_failed > %{buildroot}/%{sitedir}/site-packages/zzzz-impo
 %{_mandir}/man1/python3.1%{?ext_man}
 %endif
 %{_mandir}/man1/python%{python_version}.1%{?ext_man}
-%if %{suse_version} > 1550
+%if %{with externally_managed}
 # PEP-0668
 %{sitedir}/EXTERNALLY-MANAGED
 %endif