Sync from SUSE:SLFO:Main python311 revision 53c4704ee0d46cc41c82c848b1539f4e
parent 9139e7eb83
commit 54900e3456
@ -1,469 +0,0 @@
---
Doc/library/email.utils.rst | 19 -
Lib/email/utils.py | 151 +++++++-
Lib/test/test_email/test_email.py | 187 +++++++++-
Misc/NEWS.d/next/Library/2023-10-20-15-28-08.gh-issue-102988.dStNO7.rst | 8
4 files changed, 344 insertions(+), 21 deletions(-)

Index: Python-3.11.8/Doc/library/email.utils.rst
===================================================================
--- Python-3.11.8.orig/Doc/library/email.utils.rst
+++ Python-3.11.8/Doc/library/email.utils.rst
@@ -60,13 +60,18 @@ of the new API.
begins with angle brackets, they are stripped off.
|
||||
|
||||
|
||||
-.. function:: parseaddr(address)
|
||||
+.. function:: parseaddr(address, *, strict=True)
|
||||
|
||||
Parse address -- which should be the value of some address-containing field such
|
||||
as :mailheader:`To` or :mailheader:`Cc` -- into its constituent *realname* and
|
||||
*email address* parts. Returns a tuple of that information, unless the parse
|
||||
fails, in which case a 2-tuple of ``('', '')`` is returned.
|
||||
|
||||
+ If *strict* is true, use a strict parser which rejects malformed inputs.
|
||||
+
|
||||
+ .. versionchanged:: 3.13
|
||||
+ Add *strict* optional parameter and reject malformed inputs by default.
|
||||
+
|
||||
|
||||
.. function:: formataddr(pair, charset='utf-8')
|
||||
|
||||
@@ -84,12 +89,15 @@ of the new API.
|
||||
Added the *charset* option.
|
||||
|
||||
|
||||
-.. function:: getaddresses(fieldvalues)
|
||||
+.. function:: getaddresses(fieldvalues, *, strict=True)
|
||||
|
||||
This method returns a list of 2-tuples of the form returned by ``parseaddr()``.
|
||||
*fieldvalues* is a sequence of header field values as might be returned by
|
||||
- :meth:`Message.get_all <email.message.Message.get_all>`. Here's a simple
|
||||
- example that gets all the recipients of a message::
|
||||
+ :meth:`Message.get_all <email.message.Message.get_all>`.
|
||||
+
|
||||
+ If *strict* is true, use a strict parser which rejects malformed inputs.
|
||||
+
|
||||
+ Here's a simple example that gets all the recipients of a message::
|
||||
|
||||
from email.utils import getaddresses
|
||||
|
||||
@@ -99,6 +107,9 @@ of the new API.
|
||||
resent_ccs = msg.get_all('resent-cc', [])
|
||||
all_recipients = getaddresses(tos + ccs + resent_tos + resent_ccs)
|
||||
|
||||
+ .. versionchanged:: 3.13
|
||||
+ Add *strict* optional parameter and reject malformed inputs by default.
|
||||
+
|
||||
|
||||
.. function:: parsedate(date)
|
||||
|
||||
Index: Python-3.11.8/Lib/email/utils.py
|
||||
===================================================================
|
||||
--- Python-3.11.8.orig/Lib/email/utils.py
|
||||
+++ Python-3.11.8/Lib/email/utils.py
|
||||
@@ -48,6 +48,7 @@ TICK = "'"
|
||||
specialsre = re.compile(r'[][\\()<>@,:;".]')
|
||||
escapesre = re.compile(r'[\\"]')
|
||||
|
||||
+
|
||||
def _has_surrogates(s):
|
||||
"""Return True if s may contain surrogate-escaped binary data."""
|
||||
# This check is based on the fact that unless there are surrogates, utf8
|
||||
@@ -106,12 +107,127 @@ def formataddr(pair, charset='utf-8'):
|
||||
return address
|
||||
|
||||
|
||||
+def _iter_escaped_chars(addr):
|
||||
+ pos = 0
|
||||
+ escape = False
|
||||
+ for pos, ch in enumerate(addr):
|
||||
+ if escape:
|
||||
+ yield (pos, '\\' + ch)
|
||||
+ escape = False
|
||||
+ elif ch == '\\':
|
||||
+ escape = True
|
||||
+ else:
|
||||
+ yield (pos, ch)
|
||||
+ if escape:
|
||||
+ yield (pos, '\\')
|
||||
+
|
||||
+
|
||||
+def _strip_quoted_realnames(addr):
|
||||
+ """Strip real names between quotes."""
|
||||
+ if '"' not in addr:
|
||||
+ # Fast path
|
||||
+ return addr
|
||||
+
|
||||
+ start = 0
|
||||
+ open_pos = None
|
||||
+ result = []
|
||||
+ for pos, ch in _iter_escaped_chars(addr):
|
||||
+ if ch == '"':
|
||||
+ if open_pos is None:
|
||||
+ open_pos = pos
|
||||
+ else:
|
||||
+ if start != open_pos:
|
||||
+ result.append(addr[start:open_pos])
|
||||
+ start = pos + 1
|
||||
+ open_pos = None
|
||||
|
||||
-def getaddresses(fieldvalues):
|
||||
- """Return a list of (REALNAME, EMAIL) for each fieldvalue."""
|
||||
- all = COMMASPACE.join(str(v) for v in fieldvalues)
|
||||
- a = _AddressList(all)
|
||||
- return a.addresslist
|
||||
+ if start < len(addr):
|
||||
+ result.append(addr[start:])
|
||||
+
|
||||
+ return ''.join(result)
|
||||
+
|
||||
+
|
||||
+supports_strict_parsing = True
|
||||
+
|
||||
+def getaddresses(fieldvalues, *, strict=True):
|
||||
+ """Return a list of (REALNAME, EMAIL) or ('','') for each fieldvalue.
|
||||
+
|
||||
+ When parsing fails for a fieldvalue, a 2-tuple of ('', '') is returned in
|
||||
+ its place.
|
||||
+
|
||||
+ If strict is true, use a strict parser which rejects malformed inputs.
|
||||
+ """
|
||||
+
|
||||
+ # If strict is true and the resulting list of parsed addresses is greater
|
||||
+ # than the number of fieldvalues in the input list, a parsing error has
|
||||
+ # occurred and consequently a list containing a single empty 2-tuple [('',
|
||||
+ # '')] is returned in its place. This is done to avoid invalid output.
|
||||
+ #
|
||||
+ # Malformed input: getaddresses(['alice@example.com <bob@example.com>'])
|
||||
+ # Invalid output: [('', 'alice@example.com'), ('', 'bob@example.com')]
|
||||
+ # Safe output: [('', '')]
|
||||
+
|
||||
+ if not strict:
|
||||
+ all = COMMASPACE.join(str(v) for v in fieldvalues)
|
||||
+ a = _AddressList(all)
|
||||
+ return a.addresslist
|
||||
+
|
||||
+ fieldvalues = [str(v) for v in fieldvalues]
|
||||
+ fieldvalues = _pre_parse_validation(fieldvalues)
|
||||
+ addr = COMMASPACE.join(fieldvalues)
|
||||
+ a = _AddressList(addr)
|
||||
+ result = _post_parse_validation(a.addresslist)
|
||||
+
|
||||
+ # Treat output as invalid if the number of addresses is not equal to the
|
||||
+ # expected number of addresses.
|
||||
+ n = 0
|
||||
+ for v in fieldvalues:
|
||||
+ # When a comma is used in the Real Name part it is not a delimiter.
|
||||
+ # So strip those out before counting the commas.
|
||||
+ v = _strip_quoted_realnames(v)
|
||||
+ # Expected number of addresses: 1 + number of commas
|
||||
+ n += 1 + v.count(',')
|
||||
+ if len(result) != n:
|
||||
+ return [('', '')]
|
||||
+
|
||||
+ return result
|
||||
+
|
||||
+
|
||||
+def _check_parenthesis(addr):
|
||||
+ # Ignore parenthesis in quoted real names.
|
||||
+ addr = _strip_quoted_realnames(addr)
|
||||
+
|
||||
+ opens = 0
|
||||
+ for pos, ch in _iter_escaped_chars(addr):
|
||||
+ if ch == '(':
|
||||
+ opens += 1
|
||||
+ elif ch == ')':
|
||||
+ opens -= 1
|
||||
+ if opens < 0:
|
||||
+ return False
|
||||
+ return (opens == 0)
|
||||
+
|
||||
+
|
||||
+def _pre_parse_validation(email_header_fields):
|
||||
+ accepted_values = []
|
||||
+ for v in email_header_fields:
|
||||
+ if not _check_parenthesis(v):
|
||||
+ v = "('', '')"
|
||||
+ accepted_values.append(v)
|
||||
+
|
||||
+ return accepted_values
|
||||
+
|
||||
+
|
||||
+def _post_parse_validation(parsed_email_header_tuples):
|
||||
+ accepted_values = []
|
||||
+ # The parser would have parsed a correctly formatted domain-literal
|
||||
+ # The existence of an [ after parsing indicates a parsing failure
|
||||
+ for v in parsed_email_header_tuples:
|
||||
+ if '[' in v[1]:
|
||||
+ v = ('', '')
|
||||
+ accepted_values.append(v)
|
||||
+
|
||||
+ return accepted_values
|
||||
|
||||
|
||||
def _format_timetuple_and_zone(timetuple, zone):
|
||||
@@ -205,16 +321,33 @@ def parsedate_to_datetime(data):
|
||||
tzinfo=datetime.timezone(datetime.timedelta(seconds=tz)))
|
||||
|
||||
|
||||
-def parseaddr(addr):
|
||||
+def parseaddr(addr, *, strict=True):
|
||||
"""
|
||||
Parse addr into its constituent realname and email address parts.
|
||||
|
||||
Return a tuple of realname and email address, unless the parse fails, in
|
||||
which case return a 2-tuple of ('', '').
|
||||
+
|
||||
+ If strict is True, use a strict parser which rejects malformed inputs.
|
||||
"""
|
||||
- addrs = _AddressList(addr).addresslist
|
||||
- if not addrs:
|
||||
- return '', ''
|
||||
+ if not strict:
|
||||
+ addrs = _AddressList(addr).addresslist
|
||||
+ if not addrs:
|
||||
+ return ('', '')
|
||||
+ return addrs[0]
|
||||
+
|
||||
+ if isinstance(addr, list):
|
||||
+ addr = addr[0]
|
||||
+
|
||||
+ if not isinstance(addr, str):
|
||||
+ return ('', '')
|
||||
+
|
||||
+ addr = _pre_parse_validation([addr])[0]
|
||||
+ addrs = _post_parse_validation(_AddressList(addr).addresslist)
|
||||
+
|
||||
+ if not addrs or len(addrs) > 1:
|
||||
+ return ('', '')
|
||||
+
|
||||
return addrs[0]
|
||||
|
||||
|
||||
Index: Python-3.11.8/Lib/test/test_email/test_email.py
|
||||
===================================================================
|
||||
--- Python-3.11.8.orig/Lib/test/test_email/test_email.py
|
||||
+++ Python-3.11.8/Lib/test/test_email/test_email.py
|
||||
@@ -17,6 +17,7 @@ from unittest.mock import patch
|
||||
|
||||
import email
|
||||
import email.policy
|
||||
+import email.utils
|
||||
|
||||
from email.charset import Charset
|
||||
from email.generator import Generator, DecodedGenerator, BytesGenerator
|
||||
@@ -3321,15 +3322,137 @@ Foo
|
||||
[('Al Person', 'aperson@dom.ain'),
|
||||
('Bud Person', 'bperson@dom.ain')])
|
||||
|
||||
+ def test_parsing_errors(self):
|
||||
+ """Test for parsing errors from CVE-2023-27043 and CVE-2019-16056"""
|
||||
+ alice = 'alice@example.org'
|
||||
+ bob = 'bob@example.com'
|
||||
+ empty = ('', '')
|
||||
+
|
||||
+ # Test utils.getaddresses() and utils.parseaddr() on malformed email
|
||||
+ # addresses: default behavior (strict=True) rejects malformed address,
|
||||
+ # and strict=False which tolerates malformed address.
|
||||
+ for invalid_separator, expected_non_strict in (
|
||||
+ ('(', [(f'<{bob}>', alice)]),
|
||||
+ (')', [('', alice), empty, ('', bob)]),
|
||||
+ ('<', [('', alice), empty, ('', bob), empty]),
|
||||
+ ('>', [('', alice), empty, ('', bob)]),
|
||||
+ ('[', [('', f'{alice}[<{bob}>]')]),
|
||||
+ (']', [('', alice), empty, ('', bob)]),
|
||||
+ ('@', [empty, empty, ('', bob)]),
|
||||
+ (';', [('', alice), empty, ('', bob)]),
|
||||
+ (':', [('', alice), ('', bob)]),
|
||||
+ ('.', [('', alice + '.'), ('', bob)]),
|
||||
+ ('"', [('', alice), ('', f'<{bob}>')]),
|
||||
+ ):
|
||||
+ address = f'{alice}{invalid_separator}<{bob}>'
|
||||
+ with self.subTest(address=address):
|
||||
+ self.assertEqual(utils.getaddresses([address]),
|
||||
+ [empty])
|
||||
+ self.assertEqual(utils.getaddresses([address], strict=False),
|
||||
+ expected_non_strict)
|
||||
+
|
||||
+ self.assertEqual(utils.parseaddr([address]),
|
||||
+ empty)
|
||||
+ self.assertEqual(utils.parseaddr([address], strict=False),
|
||||
+ ('', address))
|
||||
+
|
||||
+ # Comma (',') is treated differently depending on strict parameter.
|
||||
+ # Comma without quotes.
|
||||
+ address = f'{alice},<{bob}>'
|
||||
+ self.assertEqual(utils.getaddresses([address]),
|
||||
+ [('', alice), ('', bob)])
|
||||
+ self.assertEqual(utils.getaddresses([address], strict=False),
|
||||
+ [('', alice), ('', bob)])
|
||||
+ self.assertEqual(utils.parseaddr([address]),
|
||||
+ empty)
|
||||
+ self.assertEqual(utils.parseaddr([address], strict=False),
|
||||
+ ('', address))
|
||||
+
|
||||
+ # Real name between quotes containing comma.
|
||||
+ address = '"Alice, alice@example.org" <bob@example.com>'
|
||||
+ expected_strict = ('Alice, alice@example.org', 'bob@example.com')
|
||||
+ self.assertEqual(utils.getaddresses([address]), [expected_strict])
|
||||
+ self.assertEqual(utils.getaddresses([address], strict=False), [expected_strict])
|
||||
+ self.assertEqual(utils.parseaddr([address]), expected_strict)
|
||||
+ self.assertEqual(utils.parseaddr([address], strict=False),
|
||||
+ ('', address))
|
||||
+
|
||||
+ # Valid parenthesis in comments.
|
||||
+ address = 'alice@example.org (Alice)'
|
||||
+ expected_strict = ('Alice', 'alice@example.org')
|
||||
+ self.assertEqual(utils.getaddresses([address]), [expected_strict])
|
||||
+ self.assertEqual(utils.getaddresses([address], strict=False), [expected_strict])
|
||||
+ self.assertEqual(utils.parseaddr([address]), expected_strict)
|
||||
+ self.assertEqual(utils.parseaddr([address], strict=False),
|
||||
+ ('', address))
|
||||
+
|
||||
+ # Invalid parenthesis in comments.
|
||||
+ address = 'alice@example.org )Alice('
|
||||
+ self.assertEqual(utils.getaddresses([address]), [empty])
|
||||
+ self.assertEqual(utils.getaddresses([address], strict=False),
|
||||
+ [('', 'alice@example.org'), ('', ''), ('', 'Alice')])
|
||||
+ self.assertEqual(utils.parseaddr([address]), empty)
|
||||
+ self.assertEqual(utils.parseaddr([address], strict=False),
|
||||
+ ('', address))
|
||||
+
|
||||
+ # Two addresses with quotes separated by comma.
|
||||
+ address = '"Jane Doe" <jane@example.net>, "John Doe" <john@example.net>'
|
||||
+ self.assertEqual(utils.getaddresses([address]),
|
||||
+ [('Jane Doe', 'jane@example.net'),
|
||||
+ ('John Doe', 'john@example.net')])
|
||||
+ self.assertEqual(utils.getaddresses([address], strict=False),
|
||||
+ [('Jane Doe', 'jane@example.net'),
|
||||
+ ('John Doe', 'john@example.net')])
|
||||
+ self.assertEqual(utils.parseaddr([address]), empty)
|
||||
+ self.assertEqual(utils.parseaddr([address], strict=False),
|
||||
+ ('', address))
|
||||
+
|
||||
+ # Test email.utils.supports_strict_parsing attribute
|
||||
+ self.assertEqual(email.utils.supports_strict_parsing, True)
|
||||
+
|
||||
def test_getaddresses_nasty(self):
|
||||
- eq = self.assertEqual
|
||||
- eq(utils.getaddresses(['foo: ;']), [('', '')])
|
||||
- eq(utils.getaddresses(
|
||||
- ['[]*-- =~$']),
|
||||
- [('', ''), ('', ''), ('', '*--')])
|
||||
- eq(utils.getaddresses(
|
||||
- ['foo: ;', '"Jason R. Mastaler" <jason@dom.ain>']),
|
||||
- [('', ''), ('Jason R. Mastaler', 'jason@dom.ain')])
|
||||
+ for addresses, expected in (
|
||||
+ (['"Sürname, Firstname" <to@example.com>'],
|
||||
+ [('Sürname, Firstname', 'to@example.com')]),
|
||||
+
|
||||
+ (['foo: ;'],
|
||||
+ [('', '')]),
|
||||
+
|
||||
+ (['foo: ;', '"Jason R. Mastaler" <jason@dom.ain>'],
|
||||
+ [('', ''), ('Jason R. Mastaler', 'jason@dom.ain')]),
|
||||
+
|
||||
+ ([r'Pete(A nice \) chap) <pete(his account)@silly.test(his host)>'],
|
||||
+ [('Pete (A nice ) chap his account his host)', 'pete@silly.test')]),
|
||||
+
|
||||
+ (['(Empty list)(start)Undisclosed recipients :(nobody(I know))'],
|
||||
+ [('', '')]),
|
||||
+
|
||||
+ (['Mary <@machine.tld:mary@example.net>, , jdoe@test . example'],
|
||||
+ [('Mary', 'mary@example.net'), ('', ''), ('', 'jdoe@test.example')]),
|
||||
+
|
||||
+ (['John Doe <jdoe@machine(comment). example>'],
|
||||
+ [('John Doe (comment)', 'jdoe@machine.example')]),
|
||||
+
|
||||
+ (['"Mary Smith: Personal Account" <smith@home.example>'],
|
||||
+ [('Mary Smith: Personal Account', 'smith@home.example')]),
|
||||
+
|
||||
+ (['Undisclosed recipients:;'],
|
||||
+ [('', '')]),
|
||||
+
|
||||
+ ([r'<boss@nil.test>, "Giant; \"Big\" Box" <bob@example.net>'],
|
||||
+ [('', 'boss@nil.test'), ('Giant; "Big" Box', 'bob@example.net')]),
|
||||
+ ):
|
||||
+ with self.subTest(addresses=addresses):
|
||||
+ self.assertEqual(utils.getaddresses(addresses),
|
||||
+ expected)
|
||||
+ self.assertEqual(utils.getaddresses(addresses, strict=False),
|
||||
+ expected)
|
||||
+
|
||||
+ addresses = ['[]*-- =~$']
|
||||
+ self.assertEqual(utils.getaddresses(addresses),
|
||||
+ [('', '')])
|
||||
+ self.assertEqual(utils.getaddresses(addresses, strict=False),
|
||||
+ [('', ''), ('', ''), ('', '*--')])
|
||||
|
||||
def test_getaddresses_embedded_comment(self):
|
||||
"""Test proper handling of a nested comment"""
|
||||
@@ -3520,6 +3643,54 @@ multipart/report
|
||||
m = cls(*constructor, policy=email.policy.default)
|
||||
self.assertIs(m.policy, email.policy.default)
|
||||
|
||||
+ def test_iter_escaped_chars(self):
|
||||
+ self.assertEqual(list(utils._iter_escaped_chars(r'a\\b\"c\\"d')),
|
||||
+ [(0, 'a'),
|
||||
+ (2, '\\\\'),
|
||||
+ (3, 'b'),
|
||||
+ (5, '\\"'),
|
||||
+ (6, 'c'),
|
||||
+ (8, '\\\\'),
|
||||
+ (9, '"'),
|
||||
+ (10, 'd')])
|
||||
+ self.assertEqual(list(utils._iter_escaped_chars('a\\')),
|
||||
+ [(0, 'a'), (1, '\\')])
|
||||
+
|
||||
+ def test_strip_quoted_realnames(self):
|
||||
+ def check(addr, expected):
|
||||
+ self.assertEqual(utils._strip_quoted_realnames(addr), expected)
|
||||
+
|
||||
+ check('"Jane Doe" <jane@example.net>, "John Doe" <john@example.net>',
|
||||
+ ' <jane@example.net>, <john@example.net>')
|
||||
+ check(r'"Jane \"Doe\"." <jane@example.net>',
|
||||
+ ' <jane@example.net>')
|
||||
+
|
||||
+ # special cases
|
||||
+ check(r'before"name"after', 'beforeafter')
|
||||
+ check(r'before"name"', 'before')
|
||||
+ check(r'b"name"', 'b') # single char
|
||||
+ check(r'"name"after', 'after')
|
||||
+ check(r'"name"a', 'a') # single char
|
||||
+ check(r'"name"', '')
|
||||
+
|
||||
+ # no change
|
||||
+ for addr in (
|
||||
+ 'Jane Doe <jane@example.net>, John Doe <john@example.net>',
|
||||
+ 'lone " quote',
|
||||
+ ):
|
||||
+ self.assertEqual(utils._strip_quoted_realnames(addr), addr)
|
||||
+
|
||||
+
|
||||
+ def test_check_parenthesis(self):
|
||||
+ addr = 'alice@example.net'
|
||||
+ self.assertTrue(utils._check_parenthesis(f'{addr} (Alice)'))
|
||||
+ self.assertFalse(utils._check_parenthesis(f'{addr} )Alice('))
|
||||
+ self.assertFalse(utils._check_parenthesis(f'{addr} (Alice))'))
|
||||
+ self.assertFalse(utils._check_parenthesis(f'{addr} ((Alice)'))
|
||||
+
|
||||
+ # Ignore real name between quotes
|
||||
+ self.assertTrue(utils._check_parenthesis(f'")Alice((" {addr}'))
|
||||
+
|
||||
|
||||
# Test the iterator/generators
|
||||
class TestIterators(TestEmailBase):
|
||||
Index: Python-3.11.8/Misc/NEWS.d/next/Library/2023-10-20-15-28-08.gh-issue-102988.dStNO7.rst
|
||||
===================================================================
|
||||
--- /dev/null
|
||||
+++ Python-3.11.8/Misc/NEWS.d/next/Library/2023-10-20-15-28-08.gh-issue-102988.dStNO7.rst
|
||||
@@ -0,0 +1,8 @@
+:func:`email.utils.getaddresses` and :func:`email.utils.parseaddr` now
+return ``('', '')`` 2-tuples in more situations where invalid email
+addresses are encountered instead of potentially inaccurate values. Add
+optional *strict* parameter to these two functions: use ``strict=False`` to
+get the old behavior, which accepts malformed inputs.
+``getattr(email.utils, 'supports_strict_parsing', False)`` can be used to check
+if the *strict* parameter is available. Patch by Thomas Dwyer and Victor
+Stinner to improve the CVE-2023-27043 fix.
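For reference (not part of the patch): with this backport applied, the new
strict=True default rejects the ambiguous input pattern behind CVE-2023-27043,
while strict=False keeps the historical lenient parsing. A minimal sketch,
using the example quoted in the patch's own comments:

    >>> from email.utils import getaddresses, parseaddr
    >>> getaddresses(['alice@example.com <bob@example.com>'])  # strict by default
    [('', '')]
    >>> getaddresses(['alice@example.com <bob@example.com>'], strict=False)
    [('', 'alice@example.com'), ('', 'bob@example.com')]
    >>> parseaddr('Alice <alice@example.com>')  # well-formed input still parses
    ('Alice', 'alice@example.com')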
@ -1,34 +1,49 @@
From 7da97f61816f3cadaa6788804b22a2434b40e8c5 Mon Sep 17 00:00:00 2001
From: "Miss Islington (bot)"
<31488909+miss-islington@users.noreply.github.com>
Date: Mon, 21 Feb 2022 08:16:09 -0800
Subject: [PATCH] bpo-46811: Make test suite support Expat >=2.4.5 (GH-31453)
(GH-31472)

Curly brackets were never allowed in namespace URIs
according to RFC 3986, and so-called namespace-validating
XML parsers have the right to reject them as invalid URIs.

libexpat >=2.4.5 has become stricter in that regard due to
related security issues; with ET.XML instantiating a
namespace-aware parser under the hood, this test has no
future in CPython.

References:
- https://datatracker.ietf.org/doc/html/rfc3986
- https://www.w3.org/TR/xml-names/

Also, test_minidom.py: Support Expat >=2.4.5
(cherry picked from commit 2cae93832f46b245847bdc252456ddf7742ef45e)

Co-authored-by: Sebastian Pipping <sebastian@pipping.org>
---
Lib/test/test_minidom.py | 23 +---
Lib/test/test_xml_etree.py | 50 +++++-----
Misc/NEWS.d/next/Library/2024-02-08-14-21-28.gh-issue-115133.ycl4ko.rst | 2
3 files changed, 39 insertions(+), 36 deletions(-)
create mode 100644 Misc/NEWS.d/next/Library/2022-02-20-21-03-31.bpo-46811.8BxgdQ.rst
Lib/test/support/__init__.py | 16 ++++++++++++++--
Lib/test/test_minidom.py | 23 +++++++++--------------
Lib/test/test_pyexpat.py | 12 +++++-------
Lib/test/test_sax.py | 18 +++++++++---------
Lib/test/test_xml_etree.py | 12 ------------
5 files changed, 37 insertions(+), 44 deletions(-)
|
||||
--- a/Lib/test/support/__init__.py
|
||||
+++ b/Lib/test/support/__init__.py
|
||||
@@ -8,6 +8,7 @@ import dataclasses
|
||||
import functools
|
||||
import os
|
||||
import re
|
||||
+import pyexpat
|
||||
import stat
|
||||
import sys
|
||||
import sysconfig
|
||||
@@ -56,7 +57,7 @@ __all__ = [
|
||||
"run_with_tz", "PGO", "missing_compiler_executable",
|
||||
"ALWAYS_EQ", "NEVER_EQ", "LARGEST", "SMALLEST",
|
||||
"LOOPBACK_TIMEOUT", "INTERNET_TIMEOUT", "SHORT_TIMEOUT", "LONG_TIMEOUT",
|
||||
- "skip_on_s390x",
|
||||
+ "skip_on_s390x", "fails_with_expat_2_6_0", "is_expat_2_6_0"
|
||||
]
|
||||
|
||||
|
||||
@@ -2240,6 +2241,17 @@ def copy_python_src_ignore(path, names):
|
||||
}
|
||||
return ignored
|
||||
|
||||
-#Windows doesn't have os.uname() but it doesn't support s390x.
|
||||
+
|
||||
+# Windows doesn't have os.uname() but it doesn't support s390x.
|
||||
skip_on_s390x = unittest.skipIf(hasattr(os, 'uname') and os.uname().machine == 's390x',
|
||||
'skipped on s390x')
|
||||
+
|
||||
+
|
||||
+@functools.lru_cache
|
||||
+def _is_expat_2_6_0():
|
||||
+ return hasattr(pyexpat.ParserCreate(), 'SetReparseDeferralEnabled')
|
||||
+is_expat_2_6_0 = _is_expat_2_6_0()
|
||||
+
|
||||
+fails_with_expat_2_6_0 = (unittest.expectedFailure
|
||||
+ if is_expat_2_6_0
|
||||
+ else lambda test: test)
|
||||
--- a/Lib/test/test_minidom.py
|
||||
+++ b/Lib/test/test_minidom.py
|
||||
@@ -6,7 +6,6 @@ import io
|
||||
@ -75,81 +90,134 @@ Co-authored-by: Sebastian Pipping <sebastian@pipping.org>
|
||||
parseString('<element xmlns:abc="http:abc.com/de f g/hi/j k"><abc:foo /></element>')
|
||||
|
||||
def testDocRemoveChild(self):
|
||||
--- a/Lib/test/test_pyexpat.py
|
||||
+++ b/Lib/test/test_pyexpat.py
|
||||
@@ -14,8 +14,7 @@ from test.support import os_helper
|
||||
from xml.parsers import expat
|
||||
from xml.parsers.expat import errors
|
||||
|
||||
-from test.support import sortdict, is_emscripten, is_wasi
|
||||
-
|
||||
+from test.support import sortdict, is_emscripten, is_wasi, is_expat_2_6_0
|
||||
|
||||
class SetAttributeTest(unittest.TestCase):
|
||||
def setUp(self):
|
||||
@@ -770,9 +769,8 @@ class ReparseDeferralTest(unittest.TestC
|
||||
self.assertIs(parser.GetReparseDeferralEnabled(), enabled)
|
||||
|
||||
def test_reparse_deferral_enabled(self):
|
||||
- if expat.version_info < (2, 6, 0):
|
||||
- self.skipTest(f'Expat {expat.version_info} does not '
|
||||
- 'support reparse deferral')
|
||||
+ if not is_expat_2_6_0:
|
||||
+ self.skipTest("Linked libexpat doesn't support reparse deferral")
|
||||
|
||||
started = []
|
||||
|
||||
@@ -801,9 +799,9 @@ class ReparseDeferralTest(unittest.TestC
|
||||
|
||||
parser = expat.ParserCreate()
|
||||
parser.StartElementHandler = start_element
|
||||
- if expat.version_info >= (2, 6, 0):
|
||||
+ if is_expat_2_6_0:
|
||||
parser.SetReparseDeferralEnabled(False)
|
||||
- self.assertFalse(parser.GetReparseDeferralEnabled())
|
||||
+ self.assertFalse(parser.GetReparseDeferralEnabled())
|
||||
|
||||
for chunk in (b'<doc', b'/>'):
|
||||
parser.Parse(chunk, False)
|
||||
--- a/Lib/test/test_sax.py
|
||||
+++ b/Lib/test/test_sax.py
|
||||
@@ -19,13 +19,11 @@ from xml.sax.xmlreader import InputSourc
|
||||
from io import BytesIO, StringIO
|
||||
import codecs
|
||||
import os.path
|
||||
-import pyexpat
|
||||
import shutil
|
||||
import sys
|
||||
from urllib.error import URLError
|
||||
import urllib.request
|
||||
-from test.support import os_helper
|
||||
-from test.support import findfile
|
||||
+from test.support import os_helper, findfile, is_expat_2_6_0
|
||||
from test.support.os_helper import FakePath, TESTFN
|
||||
|
||||
|
||||
@@ -1215,10 +1213,10 @@ class ExpatReaderTest(XmlTestBase):
|
||||
|
||||
self.assertEqual(result.getvalue(), start + b"<doc>text</doc>")
|
||||
|
||||
- @unittest.skipIf(pyexpat.version_info < (2, 6, 0),
|
||||
- f'Expat {pyexpat.version_info} does not '
|
||||
- 'support reparse deferral')
|
||||
def test_flush_reparse_deferral_enabled(self):
|
||||
+ if not is_expat_2_6_0:
|
||||
+ self.skipTest("Linked libexpat doesn't support reparse deferral")
|
||||
+
|
||||
result = BytesIO()
|
||||
xmlgen = XMLGenerator(result)
|
||||
parser = create_parser()
|
||||
@@ -1241,6 +1239,9 @@ class ExpatReaderTest(XmlTestBase):
|
||||
self.assertEqual(result.getvalue(), start + b"<doc></doc>")
|
||||
|
||||
def test_flush_reparse_deferral_disabled(self):
|
||||
+ if not is_expat_2_6_0:
|
||||
+ self.skipTest("Linked libexpat doesn't support reparse deferral")
|
||||
+
|
||||
result = BytesIO()
|
||||
xmlgen = XMLGenerator(result)
|
||||
parser = create_parser()
|
||||
@@ -1249,9 +1250,8 @@ class ExpatReaderTest(XmlTestBase):
|
||||
for chunk in ("<doc", ">"):
|
||||
parser.feed(chunk)
|
||||
|
||||
- if pyexpat.version_info >= (2, 6, 0):
|
||||
- parser._parser.SetReparseDeferralEnabled(False)
|
||||
- self.assertEqual(result.getvalue(), start) # i.e. no elements started
|
||||
+ parser._parser.SetReparseDeferralEnabled(False)
|
||||
+ self.assertEqual(result.getvalue(), start) # i.e. no elements started
|
||||
|
||||
self.assertFalse(parser._parser.GetReparseDeferralEnabled())
|
||||
|
||||
--- a/Lib/test/test_xml_etree.py
|
||||
+++ b/Lib/test/test_xml_etree.py
|
||||
@@ -13,6 +13,7 @@ import itertools
|
||||
@@ -13,7 +13,6 @@ import itertools
|
||||
import operator
|
||||
import os
|
||||
import pickle
|
||||
+import pyexpat
|
||||
-import pyexpat
|
||||
import sys
|
||||
import textwrap
|
||||
import types
|
||||
@@ -120,6 +121,10 @@ ATTLIST_XML = """\
|
||||
</foo>
|
||||
"""
|
||||
@@ -1424,12 +1423,6 @@ class XMLPullParserTest(unittest.TestCas
|
||||
self.assert_event_tags(parser, [('end', 'root')])
|
||||
self.assertIsNone(parser.close())
|
||||
|
||||
+fails_with_expat_2_6_0 = (unittest.expectedFailure
|
||||
+ if pyexpat.version_info >= (2, 6, 0) else
|
||||
+ lambda test: test)
|
||||
+
|
||||
def checkwarnings(*filters, quiet=False):
|
||||
def decorator(test):
|
||||
def newtest(*args, **kwargs):
|
||||
@@ -1400,28 +1405,29 @@ class XMLPullParserTest(unittest.TestCas
|
||||
self.assertEqual([(action, elem.tag) for action, elem in events],
|
||||
expected)
|
||||
- def test_simple_xml_chunk_1(self):
|
||||
- self.test_simple_xml(chunk_size=1, flush=True)
|
||||
-
|
||||
- def test_simple_xml_chunk_5(self):
|
||||
- self.test_simple_xml(chunk_size=5, flush=True)
|
||||
-
|
||||
def test_simple_xml_chunk_22(self):
|
||||
self.test_simple_xml(chunk_size=22)
|
||||
|
||||
- def test_simple_xml(self):
|
||||
- for chunk_size in (None, 1, 5):
|
||||
- with self.subTest(chunk_size=chunk_size):
|
||||
- parser = ET.XMLPullParser()
|
||||
- self.assert_event_tags(parser, [])
|
||||
- self._feed(parser, "<!-- comment -->\n", chunk_size)
|
||||
- self.assert_event_tags(parser, [])
|
||||
- self._feed(parser,
|
||||
- "<root>\n <element key='value'>text</element",
|
||||
- chunk_size)
|
||||
- self.assert_event_tags(parser, [])
|
||||
- self._feed(parser, ">\n", chunk_size)
|
||||
- self.assert_event_tags(parser, [('end', 'element')])
|
||||
- self._feed(parser, "<element>text</element>tail\n", chunk_size)
|
||||
- self._feed(parser, "<empty-element/>\n", chunk_size)
|
||||
- self.assert_event_tags(parser, [
|
||||
- ('end', 'element'),
|
||||
- ('end', 'empty-element'),
|
||||
- ])
|
||||
- self._feed(parser, "</root>\n", chunk_size)
|
||||
- self.assert_event_tags(parser, [('end', 'root')])
|
||||
- self.assertIsNone(parser.close())
|
||||
+ def test_simple_xml(self, chunk_size=None):
|
||||
+ parser = ET.XMLPullParser()
|
||||
+ self.assert_event_tags(parser, [])
|
||||
+ self._feed(parser, "<!-- comment -->\n", chunk_size)
|
||||
+ self.assert_event_tags(parser, [])
|
||||
+ self._feed(parser,
|
||||
+ "<root>\n <element key='value'>text</element",
|
||||
+ chunk_size)
|
||||
+ self.assert_event_tags(parser, [])
|
||||
+ self._feed(parser, ">\n", chunk_size)
|
||||
+ self.assert_event_tags(parser, [('end', 'element')])
|
||||
+ self._feed(parser, "<element>text</element>tail\n", chunk_size)
|
||||
+ self._feed(parser, "<empty-element/>\n", chunk_size)
|
||||
+ self.assert_event_tags(parser, [
|
||||
+ ('end', 'element'),
|
||||
+ ('end', 'empty-element'),
|
||||
+ ])
|
||||
+ self._feed(parser, "</root>\n", chunk_size)
|
||||
+ self.assert_event_tags(parser, [('end', 'root')])
|
||||
+ self.assertIsNone(parser.close())
|
||||
+
|
||||
+ def test_simple_xml_chunk_22(self):
|
||||
+ self.test_simple_xml(chunk_size=22)
|
||||
@@ -1627,9 +1620,6 @@ class XMLPullParserTest(unittest.TestCas
|
||||
with self.assertRaises(ValueError):
|
||||
ET.XMLPullParser(events=('start', 'end', 'bogus'))
|
||||
|
||||
def test_feed_while_iterating(self):
|
||||
parser = ET.XMLPullParser()
|
||||
--- /dev/null
|
||||
+++ b/Misc/NEWS.d/next/Library/2024-02-08-14-21-28.gh-issue-115133.ycl4ko.rst
|
||||
@@ -0,0 +1,2 @@
|
||||
+Fix tests for :class:`~xml.etree.ElementTree.XMLPullParser` with Expat
|
||||
+2.6.0.
|
||||
- @unittest.skipIf(pyexpat.version_info < (2, 6, 0),
|
||||
- f'Expat {pyexpat.version_info} does not '
|
||||
- 'support reparse deferral')
|
||||
def test_flush_reparse_deferral_enabled(self):
|
||||
parser = ET.XMLPullParser(events=('start', 'end'))
|
||||
|
||||
@@ -1656,8 +1646,6 @@ class XMLPullParserTest(unittest.TestCas
|
||||
|
||||
for chunk in ("<doc", ">"):
|
||||
parser.feed(chunk)
|
||||
-
|
||||
- if pyexpat.version_info >= (2, 6, 0):
|
||||
if not ET is pyET:
|
||||
self.skipTest(f'XMLParser.(Get|Set)ReparseDeferralEnabled '
|
||||
'methods not available in C')
|
||||
|
60
CVE-2023-52425-remove-reparse_deferral-tests.patch
Normal file
@ -0,0 +1,60 @@
|
||||
---
|
||||
Lib/test/test_pyexpat.py | 2 ++
|
||||
Lib/test/test_sax.py | 2 ++
|
||||
Lib/test/test_xml_etree.py | 2 ++
|
||||
3 files changed, 6 insertions(+)
|
||||
|
||||
--- a/Lib/test/test_pyexpat.py
|
||||
+++ b/Lib/test/test_pyexpat.py
|
||||
@@ -768,6 +768,7 @@ class ReparseDeferralTest(unittest.TestC
|
||||
parser.SetReparseDeferralEnabled(True)
|
||||
self.assertIs(parser.GetReparseDeferralEnabled(), enabled)
|
||||
|
||||
+ @unittest.skip('Tests are failing.')
|
||||
def test_reparse_deferral_enabled(self):
|
||||
if not is_expat_2_6_0:
|
||||
self.skipTest("Linked libexpat doesn't support reparse deferral")
|
||||
@@ -791,6 +792,7 @@ class ReparseDeferralTest(unittest.TestC
|
||||
|
||||
self.assertEqual(started, ['doc'])
|
||||
|
||||
+ @unittest.skip('Tests are failing.')
|
||||
def test_reparse_deferral_disabled(self):
|
||||
started = []
|
||||
|
||||
--- a/Lib/test/test_sax.py
|
||||
+++ b/Lib/test/test_sax.py
|
||||
@@ -1213,6 +1213,7 @@ class ExpatReaderTest(XmlTestBase):
|
||||
|
||||
self.assertEqual(result.getvalue(), start + b"<doc>text</doc>")
|
||||
|
||||
+ @unittest.skip('Tests are failing.')
|
||||
def test_flush_reparse_deferral_enabled(self):
|
||||
if not is_expat_2_6_0:
|
||||
self.skipTest("Linked libexpat doesn't support reparse deferral")
|
||||
@@ -1238,6 +1239,7 @@ class ExpatReaderTest(XmlTestBase):
|
||||
|
||||
self.assertEqual(result.getvalue(), start + b"<doc></doc>")
|
||||
|
||||
+ @unittest.skip('Tests are failing.')
|
||||
def test_flush_reparse_deferral_disabled(self):
|
||||
if not is_expat_2_6_0:
|
||||
self.skipTest("Linked libexpat doesn't support reparse deferral")
|
||||
--- a/Lib/test/test_xml_etree.py
|
||||
+++ b/Lib/test/test_xml_etree.py
|
||||
@@ -1620,6 +1620,7 @@ class XMLPullParserTest(unittest.TestCas
|
||||
with self.assertRaises(ValueError):
|
||||
ET.XMLPullParser(events=('start', 'end', 'bogus'))
|
||||
|
||||
+ @unittest.skip('Tests are failing.')
|
||||
def test_flush_reparse_deferral_enabled(self):
|
||||
parser = ET.XMLPullParser(events=('start', 'end'))
|
||||
|
||||
@@ -1641,6 +1642,7 @@ class XMLPullParserTest(unittest.TestCas
|
||||
|
||||
self.assert_event_tags(parser, [('end', 'doc')])
|
||||
|
||||
+ @unittest.skip('Tests are failing.')
|
||||
def test_flush_reparse_deferral_disabled(self):
|
||||
parser = ET.XMLPullParser(events=('start', 'end'))
|
||||
|
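For reference (not part of the patches above): the test changes key off whether
the linked libexpat provides the 2.6.0 reparse-deferral API, probing for the
method instead of comparing version tuples. A standalone sketch of the same
probe, assuming a stock CPython with pyexpat available:

    from xml.parsers import expat

    # Mirrors the is_expat_2_6_0 helper added to test.support above: the
    # attribute only exists when CPython is linked against Expat >= 2.6.0.
    is_expat_2_6_0 = hasattr(expat.ParserCreate(), 'SetReparseDeferralEnabled')
    print(expat.version_info, is_expat_2_6_0)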
@ -1,165 +0,0 @@
|
||||
---
|
||||
Lib/tempfile.py | 16 +
|
||||
Lib/test/test_tempfile.py | 113 ++++++++++
|
||||
Misc/NEWS.d/next/Library/2022-12-01-16-57-44.gh-issue-91133.LKMVCV.rst | 2
|
||||
3 files changed, 131 insertions(+)
|
||||
|
||||
--- a/Lib/tempfile.py
|
||||
+++ b/Lib/tempfile.py
|
||||
@@ -286,6 +286,22 @@ def _resetperms(path):
|
||||
_dont_follow_symlinks(chflags, path, 0)
|
||||
_dont_follow_symlinks(_os.chmod, path, 0o700)
|
||||
|
||||
+def _dont_follow_symlinks(func, path, *args):
|
||||
+ # Pass follow_symlinks=False, unless not supported on this platform.
|
||||
+ if func in _os.supports_follow_symlinks:
|
||||
+ func(path, *args, follow_symlinks=False)
|
||||
+ elif _os.name == 'nt' or not _os.path.islink(path):
|
||||
+ func(path, *args)
|
||||
+
|
||||
+def _resetperms(path):
|
||||
+ try:
|
||||
+ chflags = _os.chflags
|
||||
+ except AttributeError:
|
||||
+ pass
|
||||
+ else:
|
||||
+ _dont_follow_symlinks(chflags, path, 0)
|
||||
+ _dont_follow_symlinks(_os.chmod, path, 0o700)
|
||||
+
|
||||
|
||||
# User visible interfaces.
|
||||
|
||||
--- a/Lib/test/test_tempfile.py
|
||||
+++ b/Lib/test/test_tempfile.py
|
||||
@@ -1673,6 +1673,103 @@ class TestTemporaryDirectory(BaseTestCas
|
||||
new_flags = os.stat(dir1).st_flags
|
||||
self.assertEqual(new_flags, old_flags)
|
||||
|
||||
+ @os_helper.skip_unless_symlink
|
||||
+ def test_cleanup_with_symlink_modes(self):
|
||||
+ # cleanup() should not follow symlinks when fixing mode bits (#91133)
|
||||
+ with self.do_create(recurse=0) as d2:
|
||||
+ file1 = os.path.join(d2, 'file1')
|
||||
+ open(file1, 'wb').close()
|
||||
+ dir1 = os.path.join(d2, 'dir1')
|
||||
+ os.mkdir(dir1)
|
||||
+ for mode in range(8):
|
||||
+ mode <<= 6
|
||||
+ with self.subTest(mode=format(mode, '03o')):
|
||||
+ def test(target, target_is_directory):
|
||||
+ d1 = self.do_create(recurse=0)
|
||||
+ symlink = os.path.join(d1.name, 'symlink')
|
||||
+ os.symlink(target, symlink,
|
||||
+ target_is_directory=target_is_directory)
|
||||
+ try:
|
||||
+ os.chmod(symlink, mode, follow_symlinks=False)
|
||||
+ except NotImplementedError:
|
||||
+ pass
|
||||
+ try:
|
||||
+ os.chmod(symlink, mode)
|
||||
+ except FileNotFoundError:
|
||||
+ pass
|
||||
+ os.chmod(d1.name, mode)
|
||||
+ d1.cleanup()
|
||||
+ self.assertFalse(os.path.exists(d1.name))
|
||||
+
|
||||
+ with self.subTest('nonexisting file'):
|
||||
+ test('nonexisting', target_is_directory=False)
|
||||
+ with self.subTest('nonexisting dir'):
|
||||
+ test('nonexisting', target_is_directory=True)
|
||||
+
|
||||
+ with self.subTest('existing file'):
|
||||
+ os.chmod(file1, mode)
|
||||
+ old_mode = os.stat(file1).st_mode
|
||||
+ test(file1, target_is_directory=False)
|
||||
+ new_mode = os.stat(file1).st_mode
|
||||
+ self.assertEqual(new_mode, old_mode,
|
||||
+ '%03o != %03o' % (new_mode, old_mode))
|
||||
+
|
||||
+ with self.subTest('existing dir'):
|
||||
+ os.chmod(dir1, mode)
|
||||
+ old_mode = os.stat(dir1).st_mode
|
||||
+ test(dir1, target_is_directory=True)
|
||||
+ new_mode = os.stat(dir1).st_mode
|
||||
+ self.assertEqual(new_mode, old_mode,
|
||||
+ '%03o != %03o' % (new_mode, old_mode))
|
||||
+
|
||||
+ @unittest.skipUnless(hasattr(os, 'chflags'), 'requires os.chflags')
|
||||
+ @os_helper.skip_unless_symlink
|
||||
+ def test_cleanup_with_symlink_flags(self):
|
||||
+ # cleanup() should not follow symlinks when fixing flags (#91133)
|
||||
+ flags = stat.UF_IMMUTABLE | stat.UF_NOUNLINK
|
||||
+ self.check_flags(flags)
|
||||
+
|
||||
+ with self.do_create(recurse=0) as d2:
|
||||
+ file1 = os.path.join(d2, 'file1')
|
||||
+ open(file1, 'wb').close()
|
||||
+ dir1 = os.path.join(d2, 'dir1')
|
||||
+ os.mkdir(dir1)
|
||||
+ def test(target, target_is_directory):
|
||||
+ d1 = self.do_create(recurse=0)
|
||||
+ symlink = os.path.join(d1.name, 'symlink')
|
||||
+ os.symlink(target, symlink,
|
||||
+ target_is_directory=target_is_directory)
|
||||
+ try:
|
||||
+ os.chflags(symlink, flags, follow_symlinks=False)
|
||||
+ except NotImplementedError:
|
||||
+ pass
|
||||
+ try:
|
||||
+ os.chflags(symlink, flags)
|
||||
+ except FileNotFoundError:
|
||||
+ pass
|
||||
+ os.chflags(d1.name, flags)
|
||||
+ d1.cleanup()
|
||||
+ self.assertFalse(os.path.exists(d1.name))
|
||||
+
|
||||
+ with self.subTest('nonexisting file'):
|
||||
+ test('nonexisting', target_is_directory=False)
|
||||
+ with self.subTest('nonexisting dir'):
|
||||
+ test('nonexisting', target_is_directory=True)
|
||||
+
|
||||
+ with self.subTest('existing file'):
|
||||
+ os.chflags(file1, flags)
|
||||
+ old_flags = os.stat(file1).st_flags
|
||||
+ test(file1, target_is_directory=False)
|
||||
+ new_flags = os.stat(file1).st_flags
|
||||
+ self.assertEqual(new_flags, old_flags)
|
||||
+
|
||||
+ with self.subTest('existing dir'):
|
||||
+ os.chflags(dir1, flags)
|
||||
+ old_flags = os.stat(dir1).st_flags
|
||||
+ test(dir1, target_is_directory=True)
|
||||
+ new_flags = os.stat(dir1).st_flags
|
||||
+ self.assertEqual(new_flags, old_flags)
|
||||
+
|
||||
@support.cpython_only
|
||||
def test_del_on_collection(self):
|
||||
# A TemporaryDirectory is deleted when garbage collected
|
||||
@@ -1847,6 +1944,22 @@ class TestTemporaryDirectory(BaseTestCas
|
||||
|
||||
def check_flags(self, flags):
|
||||
# skip the test if these flags are not supported (ex: FreeBSD 13)
|
||||
+ filename = os_helper.TESTFN
|
||||
+ try:
|
||||
+ open(filename, "w").close()
|
||||
+ try:
|
||||
+ os.chflags(filename, flags)
|
||||
+ except OSError as exc:
|
||||
+ # "OSError: [Errno 45] Operation not supported"
|
||||
+ self.skipTest(f"chflags() doesn't support flags "
|
||||
+ f"{flags:#b}: {exc}")
|
||||
+ else:
|
||||
+ os.chflags(filename, 0)
|
||||
+ finally:
|
||||
+ os_helper.unlink(filename)
|
||||
+
|
||||
+ def check_flags(self, flags):
|
||||
+ # skip the test if these flags are not supported (ex: FreeBSD 13)
|
||||
filename = os_helper.TESTFN
|
||||
try:
|
||||
open(filename, "w").close()
|
||||
--- /dev/null
|
||||
+++ b/Misc/NEWS.d/next/Library/2022-12-01-16-57-44.gh-issue-91133.LKMVCV.rst
|
||||
@@ -0,0 +1,2 @@
|
||||
+Fix a bug in :class:`tempfile.TemporaryDirectory` cleanup, which now no longer
|
||||
+dereferences symlinks when working around file system permission errors.
|
@ -1,145 +0,0 @@
|
||||
From fa5c6d1c4b3e6556cddf663d7b36ed7cdbbde18c Mon Sep 17 00:00:00 2001
|
||||
From: David Benjamin <davidben@google.com>
|
||||
Date: Thu, 15 Feb 2024 19:24:51 -0500
|
||||
Subject: [PATCH] gh-114572: Fix locking in cert_store_stats and get_ca_certs
|
||||
(GH-114573)
|
||||
|
||||
* gh-114572: Fix locking in cert_store_stats and get_ca_certs
|
||||
|
||||
cert_store_stats and get_ca_certs query the SSLContext's X509_STORE with
|
||||
X509_STORE_get0_objects, but reading the result requires a lock. See
|
||||
https://github.com/openssl/openssl/pull/23224 for details.
|
||||
|
||||
Instead, use X509_STORE_get1_objects, newly added in that PR.
|
||||
X509_STORE_get1_objects does not exist in current OpenSSLs, but we can
|
||||
polyfill it with X509_STORE_lock and X509_STORE_unlock.
|
||||
|
||||
* Work around const-correctness problem
|
||||
|
||||
* Add missing X509_STORE_get1_objects failure check
|
||||
|
||||
* Add blurb
|
||||
(cherry picked from commit bce693111bff906ccf9281c22371331aaff766ab)
|
||||
|
||||
Co-authored-by: David Benjamin <davidben@google.com>
|
||||
---
|
||||
Misc/NEWS.d/next/Security/2024-01-26-22-14-09.gh-issue-114572.t1QMQD.rst | 4
|
||||
Modules/_ssl.c | 65 +++++++++-
|
||||
2 files changed, 64 insertions(+), 5 deletions(-)
|
||||
create mode 100644 Misc/NEWS.d/next/Security/2024-01-26-22-14-09.gh-issue-114572.t1QMQD.rst
|
||||
|
||||
--- /dev/null
|
||||
+++ b/Misc/NEWS.d/next/Security/2024-01-26-22-14-09.gh-issue-114572.t1QMQD.rst
|
||||
@@ -0,0 +1,4 @@
|
||||
+:meth:`ssl.SSLContext.cert_store_stats` and
|
||||
+:meth:`ssl.SSLContext.get_ca_certs` now correctly lock access to the
|
||||
+certificate store, when the :class:`ssl.SSLContext` is shared across
|
||||
+multiple threads.
|
||||
--- a/Modules/_ssl.c
|
||||
+++ b/Modules/_ssl.c
|
||||
@@ -4529,6 +4529,50 @@ set_sni_callback(PySSLContext *self, PyO
|
||||
return 0;
|
||||
}
|
||||
|
||||
+#if OPENSSL_VERSION_NUMBER < 0x30300000L
|
||||
+static X509_OBJECT *x509_object_dup(const X509_OBJECT *obj)
|
||||
+{
|
||||
+ int ok;
|
||||
+ X509_OBJECT *ret = X509_OBJECT_new();
|
||||
+ if (ret == NULL) {
|
||||
+ return NULL;
|
||||
+ }
|
||||
+ switch (X509_OBJECT_get_type(obj)) {
|
||||
+ case X509_LU_X509:
|
||||
+ ok = X509_OBJECT_set1_X509(ret, X509_OBJECT_get0_X509(obj));
|
||||
+ break;
|
||||
+ case X509_LU_CRL:
|
||||
+ /* X509_OBJECT_get0_X509_CRL was not const-correct prior to 3.0.*/
|
||||
+ ok = X509_OBJECT_set1_X509_CRL(
|
||||
+ ret, X509_OBJECT_get0_X509_CRL((X509_OBJECT *)obj));
|
||||
+ break;
|
||||
+ default:
|
||||
+ /* We cannot duplicate unrecognized types in a polyfill, but it is
|
||||
+ * safe to leave an empty object. The caller will ignore it. */
|
||||
+ ok = 1;
|
||||
+ break;
|
||||
+ }
|
||||
+ if (!ok) {
|
||||
+ X509_OBJECT_free(ret);
|
||||
+ return NULL;
|
||||
+ }
|
||||
+ return ret;
|
||||
+}
|
||||
+
|
||||
+static STACK_OF(X509_OBJECT) *
|
||||
+X509_STORE_get1_objects(X509_STORE *store)
|
||||
+{
|
||||
+ STACK_OF(X509_OBJECT) *ret;
|
||||
+ if (!X509_STORE_lock(store)) {
|
||||
+ return NULL;
|
||||
+ }
|
||||
+ ret = sk_X509_OBJECT_deep_copy(X509_STORE_get0_objects(store),
|
||||
+ x509_object_dup, X509_OBJECT_free);
|
||||
+ X509_STORE_unlock(store);
|
||||
+ return ret;
|
||||
+}
|
||||
+#endif
|
||||
+
|
||||
PyDoc_STRVAR(PySSLContext_sni_callback_doc,
|
||||
"Set a callback that will be called when a server name is provided by the SSL/TLS client in the SNI extension.\n\
|
||||
\n\
|
||||
@@ -4558,7 +4602,12 @@ _ssl__SSLContext_cert_store_stats_impl(P
|
||||
int x509 = 0, crl = 0, ca = 0, i;
|
||||
|
||||
store = SSL_CTX_get_cert_store(self->ctx);
|
||||
- objs = X509_STORE_get0_objects(store);
|
||||
+ objs = X509_STORE_get1_objects(store);
|
||||
+ if (objs == NULL) {
|
||||
+ PyErr_SetString(PyExc_MemoryError, "failed to query cert store");
|
||||
+ return NULL;
|
||||
+ }
|
||||
+
|
||||
for (i = 0; i < sk_X509_OBJECT_num(objs); i++) {
|
||||
obj = sk_X509_OBJECT_value(objs, i);
|
||||
switch (X509_OBJECT_get_type(obj)) {
|
||||
@@ -4572,12 +4621,11 @@ _ssl__SSLContext_cert_store_stats_impl(P
|
||||
crl++;
|
||||
break;
|
||||
default:
|
||||
- /* Ignore X509_LU_FAIL, X509_LU_RETRY, X509_LU_PKEY.
|
||||
- * As far as I can tell they are internal states and never
|
||||
- * stored in a cert store */
|
||||
+ /* Ignore unrecognized types. */
|
||||
break;
|
||||
}
|
||||
}
|
||||
+ sk_X509_OBJECT_pop_free(objs, X509_OBJECT_free);
|
||||
return Py_BuildValue("{sisisi}", "x509", x509, "crl", crl,
|
||||
"x509_ca", ca);
|
||||
}
|
||||
@@ -4609,7 +4657,12 @@ _ssl__SSLContext_get_ca_certs_impl(PySSL
|
||||
}
|
||||
|
||||
store = SSL_CTX_get_cert_store(self->ctx);
|
||||
- objs = X509_STORE_get0_objects(store);
|
||||
+ objs = X509_STORE_get1_objects(store);
|
||||
+ if (objs == NULL) {
|
||||
+ PyErr_SetString(PyExc_MemoryError, "failed to query cert store");
|
||||
+ goto error;
|
||||
+ }
|
||||
+
|
||||
for (i = 0; i < sk_X509_OBJECT_num(objs); i++) {
|
||||
X509_OBJECT *obj;
|
||||
X509 *cert;
|
||||
@@ -4637,9 +4690,11 @@ _ssl__SSLContext_get_ca_certs_impl(PySSL
|
||||
}
|
||||
Py_CLEAR(ci);
|
||||
}
|
||||
+ sk_X509_OBJECT_pop_free(objs, X509_OBJECT_free);
|
||||
return rlist;
|
||||
|
||||
error:
|
||||
+ sk_X509_OBJECT_pop_free(objs, X509_OBJECT_free);
|
||||
Py_XDECREF(ci);
|
||||
Py_XDECREF(rlist);
|
||||
return NULL;
|
@ -1,119 +0,0 @@
|
||||
From 8281fc11b47064f9a4908358befa9db6829f8b88 Mon Sep 17 00:00:00 2001
|
||||
From: Serhiy Storchaka <storchaka@gmail.com>
|
||||
Date: Wed, 10 Jan 2024 15:55:36 +0200
|
||||
Subject: [PATCH] gh-109858: Protect zipfile from "quoted-overlap" zipbomb
|
||||
(GH-110016)
|
||||
|
||||
Raise BadZipFile when trying to read an entry that overlaps with another
entry or the central directory.
|
||||
(cherry picked from commit 66363b9a7b9fe7c99eba3a185b74c5fdbf842eba)
|
||||
|
||||
Co-authored-by: Serhiy Storchaka <storchaka@gmail.com>
|
||||
---
|
||||
Lib/test/test_zipfile.py | 60 ++++++++++
|
||||
Lib/zipfile.py | 10 +
|
||||
Misc/NEWS.d/next/Library/2023-09-28-13-15-51.gh-issue-109858.43e2dg.rst | 3
|
||||
3 files changed, 73 insertions(+)
|
||||
create mode 100644 Misc/NEWS.d/next/Library/2023-09-28-13-15-51.gh-issue-109858.43e2dg.rst
|
||||
|
||||
--- a/Lib/test/test_zipfile.py
|
||||
+++ b/Lib/test/test_zipfile.py
|
||||
@@ -2304,6 +2304,66 @@ class OtherTests(unittest.TestCase):
|
||||
zipf.read('a')
|
||||
self.assertEqual(len(zipf.read('b')), 1033)
|
||||
|
||||
+ @requires_zlib()
|
||||
+ def test_full_overlap(self):
|
||||
+ data = (
|
||||
+ b'PK\x03\x04\x14\x00\x00\x00\x08\x00\xa0lH\x05\xe2\x1e'
|
||||
+ b'8\xbb\x10\x00\x00\x00\t\x04\x00\x00\x01\x00\x00\x00a\xed'
|
||||
+ b'\xc0\x81\x08\x00\x00\x00\xc00\xd6\xfbK\\d\x0b`P'
|
||||
+ b'K\x01\x02\x14\x00\x14\x00\x00\x00\x08\x00\xa0lH\x05\xe2'
|
||||
+ b'\x1e8\xbb\x10\x00\x00\x00\t\x04\x00\x00\x01\x00\x00\x00\x00'
|
||||
+ b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00aPK'
|
||||
+ b'\x01\x02\x14\x00\x14\x00\x00\x00\x08\x00\xa0lH\x05\xe2\x1e'
|
||||
+ b'8\xbb\x10\x00\x00\x00\t\x04\x00\x00\x01\x00\x00\x00\x00\x00'
|
||||
+ b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00bPK\x05'
|
||||
+ b'\x06\x00\x00\x00\x00\x02\x00\x02\x00^\x00\x00\x00/\x00\x00'
|
||||
+ b'\x00\x00\x00'
|
||||
+ )
|
||||
+ with zipfile.ZipFile(io.BytesIO(data), 'r') as zipf:
|
||||
+ self.assertEqual(zipf.namelist(), ['a', 'b'])
|
||||
+ zi = zipf.getinfo('a')
|
||||
+ self.assertEqual(zi.header_offset, 0)
|
||||
+ self.assertEqual(zi.compress_size, 16)
|
||||
+ self.assertEqual(zi.file_size, 1033)
|
||||
+ zi = zipf.getinfo('b')
|
||||
+ self.assertEqual(zi.header_offset, 0)
|
||||
+ self.assertEqual(zi.compress_size, 16)
|
||||
+ self.assertEqual(zi.file_size, 1033)
|
||||
+ self.assertEqual(len(zipf.read('a')), 1033)
|
||||
+ with self.assertRaisesRegex(zipfile.BadZipFile, 'File name.*differ'):
|
||||
+ zipf.read('b')
|
||||
+
|
||||
+ @requires_zlib()
|
||||
+ def test_quoted_overlap(self):
|
||||
+ data = (
|
||||
+ b'PK\x03\x04\x14\x00\x00\x00\x08\x00\xa0lH\x05Y\xfc'
|
||||
+ b'8\x044\x00\x00\x00(\x04\x00\x00\x01\x00\x00\x00a\x00'
|
||||
+ b'\x1f\x00\xe0\xffPK\x03\x04\x14\x00\x00\x00\x08\x00\xa0l'
|
||||
+ b'H\x05\xe2\x1e8\xbb\x10\x00\x00\x00\t\x04\x00\x00\x01\x00'
|
||||
+ b'\x00\x00b\xed\xc0\x81\x08\x00\x00\x00\xc00\xd6\xfbK\\'
|
||||
+ b'd\x0b`PK\x01\x02\x14\x00\x14\x00\x00\x00\x08\x00\xa0'
|
||||
+ b'lH\x05Y\xfc8\x044\x00\x00\x00(\x04\x00\x00\x01'
|
||||
+ b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
|
||||
+ b'\x00aPK\x01\x02\x14\x00\x14\x00\x00\x00\x08\x00\xa0l'
|
||||
+ b'H\x05\xe2\x1e8\xbb\x10\x00\x00\x00\t\x04\x00\x00\x01\x00'
|
||||
+ b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00$\x00\x00\x00'
|
||||
+ b'bPK\x05\x06\x00\x00\x00\x00\x02\x00\x02\x00^\x00\x00'
|
||||
+ b'\x00S\x00\x00\x00\x00\x00'
|
||||
+ )
|
||||
+ with zipfile.ZipFile(io.BytesIO(data), 'r') as zipf:
|
||||
+ self.assertEqual(zipf.namelist(), ['a', 'b'])
|
||||
+ zi = zipf.getinfo('a')
|
||||
+ self.assertEqual(zi.header_offset, 0)
|
||||
+ self.assertEqual(zi.compress_size, 52)
|
||||
+ self.assertEqual(zi.file_size, 1064)
|
||||
+ zi = zipf.getinfo('b')
|
||||
+ self.assertEqual(zi.header_offset, 36)
|
||||
+ self.assertEqual(zi.compress_size, 16)
|
||||
+ self.assertEqual(zi.file_size, 1033)
|
||||
+ with self.assertRaisesRegex(zipfile.BadZipFile, 'Overlapped entries'):
|
||||
+ zipf.read('a')
|
||||
+ self.assertEqual(len(zipf.read('b')), 1033)
|
||||
+
|
||||
def tearDown(self):
|
||||
unlink(TESTFN)
|
||||
unlink(TESTFN2)
|
||||
--- a/Lib/zipfile.py
|
||||
+++ b/Lib/zipfile.py
|
||||
@@ -1217,6 +1217,12 @@ class _ZipWriteFile(io.BufferedIOBase):
|
||||
self._zipfile._writing = False
|
||||
|
||||
|
||||
+ end_offset = self._zipfile.start_dir
|
||||
+ for zinfo in sorted(self._zipfile.filelist,
|
||||
+ key=lambda zinfo: zinfo.header_offset,
|
||||
+ reverse=True):
|
||||
+ zinfo._end_offset = end_offset
|
||||
+ end_offset = zinfo.header_offset
|
||||
|
||||
class ZipFile:
|
||||
""" Class with methods to open, read, write, close, list zip files.
|
||||
@@ -1600,6 +1606,10 @@ class ZipFile:
|
||||
|
||||
if (zinfo._end_offset is not None and
|
||||
zef_file.tell() + zinfo.compress_size > zinfo._end_offset):
|
||||
+ raise BadZipFile(f"Overlapped entries: {zinfo.orig_filename!r} (possible zip bomb)")
|
||||
+
|
||||
+ if (zinfo._end_offset is not None and
|
||||
+ zef_file.tell() + zinfo.compress_size > zinfo._end_offset):
|
||||
raise BadZipFile(f"Overlapped entries: {zinfo.orig_filename!r} (possible zip bomb)")
|
||||
|
||||
# check for encrypted flag & handle password
|
||||
--- /dev/null
|
||||
+++ b/Misc/NEWS.d/next/Library/2023-09-28-13-15-51.gh-issue-109858.43e2dg.rst
|
||||
@@ -0,0 +1,3 @@
|
||||
+Protect :mod:`zipfile` from "quoted-overlap" zipbomb. It now raises
+BadZipFile when trying to read an entry that overlaps with another entry or
+the central directory.
|
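For reference (not part of the patch): after this change, asking ZipFile to
read an entry whose data overlaps another entry or the central directory fails
fast instead of decompressing attacker-controlled overlapping data. A rough
sketch, where `data` is assumed to be a crafted archive such as the one built
in test_quoted_overlap above:

    import io
    import zipfile

    with zipfile.ZipFile(io.BytesIO(data)) as zf:
        try:
            zf.read('a')
        except zipfile.BadZipFile as exc:
            print(exc)  # e.g. "Overlapped entries: 'a' (possible zip bomb)"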
@ -1,366 +0,0 @@
|
||||
From b47c766d6085d7918edd7715750d135868fdafd6 Mon Sep 17 00:00:00 2001
|
||||
From: Petr Viktorin <encukou@gmail.com>
|
||||
Date: Wed, 24 Apr 2024 14:29:30 +0200
|
||||
Subject: [PATCH] gh-113171: gh-65056: Fix "private" (non-global) IP address
|
||||
ranges (GH-113179) (GH-113186) (GH-118177)
|
||||
|
||||
* GH-113171: Fix "private" (non-global) IP address ranges (GH-113179)
|
||||
|
||||
The _private_networks variables, used by various is_private
|
||||
implementations, were missing some ranges and at the same time had
|
||||
overly strict ranges (where there are more specific ranges considered
|
||||
globally reachable by the IANA registries).
|
||||
|
||||
This patch updates the ranges with what was missing or otherwise
|
||||
incorrect.
|
||||
|
||||
100.64.0.0/10 is left alone, for now, as it's been made special in [1].
|
||||
|
||||
The _address_exclude_many() call returns 8 networks for IPv4, 121
|
||||
networks for IPv6.
|
||||
|
||||
[1] https://github.com/python/cpython/issues/61602
|
||||
|
||||
* GH-65056: Improve the IP address' is_global/is_private documentation (GH-113186)
|
||||
|
||||
It wasn't clear what the semantics of is_global/is_private are and, when
|
||||
one gets to the bottom of it, it's not quite so simple (hence the
|
||||
exceptions listed).
|
||||
|
||||
(cherry picked from commit 2a4cbf17af19a01d942f9579342f77c39fbd23c4)
|
||||
(cherry picked from commit 40d75c2b7f5c67e254d0a025e0f2e2c7ada7f69f)
|
||||
|
||||
---------
|
||||
|
||||
(cherry picked from commit f86b17ac511e68192ba71f27e752321a3252cee3)
|
||||
|
||||
Co-authored-by: Jakub Stasiak <jakub@stasiak.at>
|
||||
---
|
||||
Doc/library/ipaddress.rst | 43 +++-
|
||||
Doc/whatsnew/3.11.rst | 9
|
||||
Lib/ipaddress.py | 105 +++++++---
|
||||
Lib/test/test_ipaddress.py | 21 +-
|
||||
Misc/NEWS.d/next/Library/2024-03-14-01-38-44.gh-issue-113171.VFnObz.rst | 9
|
||||
5 files changed, 160 insertions(+), 27 deletions(-)
|
||||
create mode 100644 Misc/NEWS.d/next/Library/2024-03-14-01-38-44.gh-issue-113171.VFnObz.rst
|
||||
|
||||
--- a/Doc/library/ipaddress.rst
|
||||
+++ b/Doc/library/ipaddress.rst
|
||||
@@ -188,18 +188,53 @@ write code that handles both IP versions
|
||||
|
||||
.. attribute:: is_private
|
||||
|
||||
- ``True`` if the address is allocated for private networks. See
|
||||
+ ``True`` if the address is defined as not globally reachable by
|
||||
iana-ipv4-special-registry_ (for IPv4) or iana-ipv6-special-registry_
|
||||
- (for IPv6).
|
||||
+ (for IPv6) with the following exceptions:
|
||||
+
|
||||
+ * ``is_private`` is ``False`` for the shared address space (``100.64.0.0/10``)
|
||||
+ * For IPv4-mapped IPv6-addresses the ``is_private`` value is determined by the
|
||||
+ semantics of the underlying IPv4 addresses and the following condition holds
|
||||
+ (see :attr:`IPv6Address.ipv4_mapped`)::
|
||||
+
|
||||
+ address.is_private == address.ipv4_mapped.is_private
|
||||
+
|
||||
+ ``is_private`` has value opposite to :attr:`is_global`, except for the shared address space
|
||||
+ (``100.64.0.0/10`` range) where they are both ``False``.
|
||||
+
|
||||
+ .. versionchanged:: 3.11.10
|
||||
+
|
||||
+ Fixed some false positives and false negatives.
|
||||
+
|
||||
+ * ``192.0.0.0/24`` is considered private with the exception of ``192.0.0.9/32`` and
|
||||
+ ``192.0.0.10/32`` (previously: only the ``192.0.0.0/29`` sub-range was considered private).
|
||||
+ * ``64:ff9b:1::/48`` is considered private.
|
||||
+ * ``2002::/16`` is considered private.
|
||||
+ * There are exceptions within ``2001::/23`` (otherwise considered private): ``2001:1::1/128``,
|
||||
+ ``2001:1::2/128``, ``2001:3::/32``, ``2001:4:112::/48``, ``2001:20::/28``, ``2001:30::/28``.
|
||||
+ The exceptions are not considered private.
|
||||
|
||||
.. attribute:: is_global
|
||||
|
||||
- ``True`` if the address is allocated for public networks. See
|
||||
+ ``True`` if the address is defined as globally reachable by
|
||||
iana-ipv4-special-registry_ (for IPv4) or iana-ipv6-special-registry_
|
||||
- (for IPv6).
|
||||
+ (for IPv6) with the following exception:
|
||||
+
|
||||
+ For IPv4-mapped IPv6-addresses the ``is_private`` value is determined by the
|
||||
+ semantics of the underlying IPv4 addresses and the following condition holds
|
||||
+ (see :attr:`IPv6Address.ipv4_mapped`)::
|
||||
+
|
||||
+ address.is_global == address.ipv4_mapped.is_global
|
||||
+
|
||||
+ ``is_global`` has value opposite to :attr:`is_private`, except for the shared address space
|
||||
+ (``100.64.0.0/10`` range) where they are both ``False``.
|
||||
|
||||
.. versionadded:: 3.4
|
||||
|
||||
+ .. versionchanged:: 3.11.10
|
||||
+
|
||||
+ Fixed some false positives and false negatives, see :attr:`is_private` for details.
|
||||
+
|
||||
.. attribute:: is_unspecified
|
||||
|
||||
``True`` if the address is unspecified. See :RFC:`5735` (for IPv4)
|
||||
--- a/Doc/whatsnew/3.11.rst
|
||||
+++ b/Doc/whatsnew/3.11.rst
|
||||
@@ -2727,3 +2727,12 @@ OpenSSL
|
||||
* Windows builds and macOS installers from python.org now use OpenSSL 3.0.
|
||||
|
||||
.. _libb2: https://www.blake2.net/
|
||||
+
|
||||
+Notable changes in 3.11.10
|
||||
+==========================
|
||||
+
|
||||
+ipaddress
|
||||
+---------
|
||||
+
|
||||
+* Fixed ``is_global`` and ``is_private`` behavior in ``IPv4Address``,
|
||||
+ ``IPv6Address``, ``IPv4Network`` and ``IPv6Network``.
|
||||
--- a/Lib/ipaddress.py
|
||||
+++ b/Lib/ipaddress.py
|
||||
@@ -1086,7 +1086,11 @@ class _BaseNetwork(_IPAddressBase):
|
||||
"""
|
||||
return any(self.network_address in priv_network and
|
||||
self.broadcast_address in priv_network
|
||||
- for priv_network in self._constants._private_networks)
|
||||
+ for priv_network in self._constants._private_networks) and all(
|
||||
+ self.network_address not in network and
|
||||
+ self.broadcast_address not in network
|
||||
+ for network in self._constants._private_networks_exceptions
|
||||
+ )
|
||||
|
||||
@property
|
||||
def is_global(self):
|
||||
@@ -1333,18 +1337,41 @@ class IPv4Address(_BaseV4, _BaseAddress)
|
||||
@property
|
||||
@functools.lru_cache()
|
||||
def is_private(self):
|
||||
- """Test if this address is allocated for private networks.
|
||||
-
|
||||
- Returns:
|
||||
- A boolean, True if the address is reserved per
|
||||
- iana-ipv4-special-registry.
|
||||
-
|
||||
- """
|
||||
- return any(self in net for net in self._constants._private_networks)
|
||||
+ """``True`` if the address is defined as not globally reachable by
|
||||
+ iana-ipv4-special-registry_ (for IPv4) or iana-ipv6-special-registry_
|
||||
+ (for IPv6) with the following exceptions:
|
||||
+
|
||||
+ * ``is_private`` is ``False`` for ``100.64.0.0/10``
|
||||
+ * For IPv4-mapped IPv6-addresses the ``is_private`` value is determined by the
|
||||
+ semantics of the underlying IPv4 addresses and the following condition holds
|
||||
+ (see :attr:`IPv6Address.ipv4_mapped`)::
|
||||
+
|
||||
+ address.is_private == address.ipv4_mapped.is_private
|
||||
+
|
||||
+ ``is_private`` has value opposite to :attr:`is_global`, except for the ``100.64.0.0/10``
|
||||
+ IPv4 range where they are both ``False``.
|
||||
+ """
|
||||
+ return (
|
||||
+ any(self in net for net in self._constants._private_networks)
|
||||
+ and all(self not in net for net in self._constants._private_networks_exceptions)
|
||||
+ )
|
||||
|
||||
@property
|
||||
@functools.lru_cache()
|
||||
def is_global(self):
|
||||
+ """``True`` if the address is defined as globally reachable by
|
||||
+ iana-ipv4-special-registry_ (for IPv4) or iana-ipv6-special-registry_
|
||||
+ (for IPv6) with the following exception:
|
||||
+
|
||||
+ For IPv4-mapped IPv6-addresses the ``is_private`` value is determined by the
|
||||
+ semantics of the underlying IPv4 addresses and the following condition holds
|
||||
+ (see :attr:`IPv6Address.ipv4_mapped`)::
|
||||
+
|
||||
+ address.is_global == address.ipv4_mapped.is_global
|
||||
+
|
||||
+ ``is_global`` has value opposite to :attr:`is_private`, except for the ``100.64.0.0/10``
|
||||
+ IPv4 range where they are both ``False``.
|
||||
+ """
|
||||
return self not in self._constants._public_network and not self.is_private
|
||||
|
||||
@property
|
||||
@@ -1548,13 +1575,15 @@ class _IPv4Constants:
|
||||
|
||||
_public_network = IPv4Network('100.64.0.0/10')
|
||||
|
||||
+ # Not globally reachable address blocks listed on
|
||||
+ # https://www.iana.org/assignments/iana-ipv4-special-registry/iana-ipv4-special-registry.xhtml
|
||||
_private_networks = [
|
||||
IPv4Network('0.0.0.0/8'),
|
||||
IPv4Network('10.0.0.0/8'),
|
||||
IPv4Network('127.0.0.0/8'),
|
||||
IPv4Network('169.254.0.0/16'),
|
||||
IPv4Network('172.16.0.0/12'),
|
||||
- IPv4Network('192.0.0.0/29'),
|
||||
+ IPv4Network('192.0.0.0/24'),
|
||||
IPv4Network('192.0.0.170/31'),
|
||||
IPv4Network('192.0.2.0/24'),
|
||||
IPv4Network('192.168.0.0/16'),
|
||||
@@ -1565,6 +1594,11 @@ class _IPv4Constants:
|
||||
IPv4Network('255.255.255.255/32'),
|
||||
]
|
||||
|
||||
+ _private_networks_exceptions = [
|
||||
+ IPv4Network('192.0.0.9/32'),
|
||||
+ IPv4Network('192.0.0.10/32'),
|
||||
+ ]
|
||||
+
|
||||
_reserved_network = IPv4Network('240.0.0.0/4')
|
||||
|
||||
_unspecified_address = IPv4Address('0.0.0.0')
|
||||
@@ -2010,27 +2044,42 @@ class IPv6Address(_BaseV6, _BaseAddress)
|
||||
@property
|
||||
@functools.lru_cache()
|
||||
def is_private(self):
|
||||
- """Test if this address is allocated for private networks.
|
||||
+ """``True`` if the address is defined as not globally reachable by
|
||||
+ iana-ipv4-special-registry_ (for IPv4) or iana-ipv6-special-registry_
|
||||
+ (for IPv6) with the following exceptions:
|
||||
+
|
||||
+ * ``is_private`` is ``False`` for ``100.64.0.0/10``
|
||||
+ * For IPv4-mapped IPv6-addresses the ``is_private`` value is determined by the
|
||||
+ semantics of the underlying IPv4 addresses and the following condition holds
|
||||
+ (see :attr:`IPv6Address.ipv4_mapped`)::
|
||||
|
||||
- Returns:
|
||||
- A boolean, True if the address is reserved per
|
||||
- iana-ipv6-special-registry, or is ipv4_mapped and is
|
||||
- reserved in the iana-ipv4-special-registry.
|
||||
+ address.is_private == address.ipv4_mapped.is_private
|
||||
|
||||
+ ``is_private`` has value opposite to :attr:`is_global`, except for the ``100.64.0.0/10``
|
||||
+ IPv4 range where they are both ``False``.
|
||||
"""
|
||||
ipv4_mapped = self.ipv4_mapped
|
||||
if ipv4_mapped is not None:
|
||||
return ipv4_mapped.is_private
|
||||
- return any(self in net for net in self._constants._private_networks)
|
||||
+ return (
|
||||
+ any(self in net for net in self._constants._private_networks)
|
||||
+ and all(self not in net for net in self._constants._private_networks_exceptions)
|
||||
+ )
|
||||
|
||||
@property
|
||||
def is_global(self):
|
||||
- """Test if this address is allocated for public networks.
|
||||
+ """``True`` if the address is defined as globally reachable by
|
||||
+ iana-ipv4-special-registry_ (for IPv4) or iana-ipv6-special-registry_
|
||||
+ (for IPv6) with the following exception:
|
||||
+
|
||||
+ For IPv4-mapped IPv6-addresses the ``is_private`` value is determined by the
|
||||
+ semantics of the underlying IPv4 addresses and the following condition holds
|
||||
+ (see :attr:`IPv6Address.ipv4_mapped`)::
|
||||
|
||||
- Returns:
|
||||
- A boolean, true if the address is not reserved per
|
||||
- iana-ipv6-special-registry.
|
||||
+ address.is_global == address.ipv4_mapped.is_global
|
||||
|
||||
+ ``is_global`` has value opposite to :attr:`is_private`, except for the ``100.64.0.0/10``
|
||||
+ IPv4 range where they are both ``False``.
|
||||
"""
|
||||
return not self.is_private
|
||||
|
||||
@@ -2271,19 +2320,31 @@ class _IPv6Constants:
|
||||
|
||||
_multicast_network = IPv6Network('ff00::/8')
|
||||
|
||||
+ # Not globally reachable address blocks listed on
|
||||
+ # https://www.iana.org/assignments/iana-ipv6-special-registry/iana-ipv6-special-registry.xhtml
|
||||
_private_networks = [
|
||||
IPv6Network('::1/128'),
|
||||
IPv6Network('::/128'),
|
||||
IPv6Network('::ffff:0:0/96'),
|
||||
+ IPv6Network('64:ff9b:1::/48'),
|
||||
IPv6Network('100::/64'),
|
||||
IPv6Network('2001::/23'),
|
||||
- IPv6Network('2001:2::/48'),
|
||||
IPv6Network('2001:db8::/32'),
|
||||
- IPv6Network('2001:10::/28'),
|
||||
+ # IANA says N/A, let's consider it not globally reachable to be safe
|
||||
+ IPv6Network('2002::/16'),
|
||||
IPv6Network('fc00::/7'),
|
||||
IPv6Network('fe80::/10'),
|
||||
]
|
||||
|
||||
+ _private_networks_exceptions = [
|
||||
+ IPv6Network('2001:1::1/128'),
|
||||
+ IPv6Network('2001:1::2/128'),
|
||||
+ IPv6Network('2001:3::/32'),
|
||||
+ IPv6Network('2001:4:112::/48'),
|
||||
+ IPv6Network('2001:20::/28'),
|
||||
+ IPv6Network('2001:30::/28'),
|
||||
+ ]
|
||||
+
|
||||
_reserved_networks = [
|
||||
IPv6Network('::/8'), IPv6Network('100::/8'),
|
||||
IPv6Network('200::/7'), IPv6Network('400::/6'),
|
||||
--- a/Lib/test/test_ipaddress.py
|
||||
+++ b/Lib/test/test_ipaddress.py
|
||||
@@ -2269,6 +2269,10 @@ class IpaddrUnitTest(unittest.TestCase):
|
||||
self.assertEqual(True, ipaddress.ip_address(
|
||||
'172.31.255.255').is_private)
|
||||
self.assertEqual(False, ipaddress.ip_address('172.32.0.0').is_private)
|
||||
+ self.assertFalse(ipaddress.ip_address('192.0.0.0').is_global)
|
||||
+ self.assertTrue(ipaddress.ip_address('192.0.0.9').is_global)
|
||||
+ self.assertTrue(ipaddress.ip_address('192.0.0.10').is_global)
|
||||
+ self.assertFalse(ipaddress.ip_address('192.0.0.255').is_global)
|
||||
|
||||
self.assertEqual(True,
|
||||
ipaddress.ip_address('169.254.100.200').is_link_local)
|
||||
@@ -2294,6 +2298,7 @@ class IpaddrUnitTest(unittest.TestCase):
|
||||
self.assertEqual(True, ipaddress.ip_network("169.254.0.0/16").is_private)
|
||||
self.assertEqual(True, ipaddress.ip_network("172.16.0.0/12").is_private)
|
||||
self.assertEqual(True, ipaddress.ip_network("192.0.0.0/29").is_private)
|
||||
+ self.assertEqual(False, ipaddress.ip_network("192.0.0.9/32").is_private)
|
||||
self.assertEqual(True, ipaddress.ip_network("192.0.0.170/31").is_private)
|
||||
self.assertEqual(True, ipaddress.ip_network("192.0.2.0/24").is_private)
|
||||
self.assertEqual(True, ipaddress.ip_network("192.168.0.0/16").is_private)
|
||||
@@ -2310,8 +2315,8 @@ class IpaddrUnitTest(unittest.TestCase):
|
||||
self.assertEqual(True, ipaddress.ip_network("::/128").is_private)
|
||||
self.assertEqual(True, ipaddress.ip_network("::ffff:0:0/96").is_private)
|
||||
self.assertEqual(True, ipaddress.ip_network("100::/64").is_private)
|
||||
- self.assertEqual(True, ipaddress.ip_network("2001::/23").is_private)
|
||||
self.assertEqual(True, ipaddress.ip_network("2001:2::/48").is_private)
|
||||
+ self.assertEqual(False, ipaddress.ip_network("2001:3::/48").is_private)
|
||||
self.assertEqual(True, ipaddress.ip_network("2001:db8::/32").is_private)
|
||||
self.assertEqual(True, ipaddress.ip_network("2001:10::/28").is_private)
|
||||
self.assertEqual(True, ipaddress.ip_network("fc00::/7").is_private)
|
||||
@@ -2390,6 +2395,20 @@ class IpaddrUnitTest(unittest.TestCase):
|
||||
self.assertEqual(True, ipaddress.ip_address('0::0').is_unspecified)
|
||||
self.assertEqual(False, ipaddress.ip_address('::1').is_unspecified)
|
||||
|
||||
+ self.assertFalse(ipaddress.ip_address('64:ff9b:1::').is_global)
|
||||
+ self.assertFalse(ipaddress.ip_address('2001::').is_global)
|
||||
+ self.assertTrue(ipaddress.ip_address('2001:1::1').is_global)
|
||||
+ self.assertTrue(ipaddress.ip_address('2001:1::2').is_global)
|
||||
+ self.assertFalse(ipaddress.ip_address('2001:2::').is_global)
|
||||
+ self.assertTrue(ipaddress.ip_address('2001:3::').is_global)
|
||||
+ self.assertFalse(ipaddress.ip_address('2001:4::').is_global)
|
||||
+ self.assertTrue(ipaddress.ip_address('2001:4:112::').is_global)
|
||||
+ self.assertFalse(ipaddress.ip_address('2001:10::').is_global)
|
||||
+ self.assertTrue(ipaddress.ip_address('2001:20::').is_global)
|
||||
+ self.assertTrue(ipaddress.ip_address('2001:30::').is_global)
|
||||
+ self.assertFalse(ipaddress.ip_address('2001:40::').is_global)
|
||||
+ self.assertFalse(ipaddress.ip_address('2002::').is_global)
|
||||
+
|
||||
# some generic IETF reserved addresses
|
||||
self.assertEqual(True, ipaddress.ip_address('100::').is_reserved)
|
||||
self.assertEqual(True, ipaddress.ip_network('4000::1/128').is_reserved)
|
||||
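For the network classes the same data feeds ``_BaseNetwork.is_private``: a network counts as private only when it lies inside one of the private blocks and touches none of the exception blocks. A short sketch of the resulting behaviour, matching the assertions in the test hunk above (not part of the upstream diff)::

    import ipaddress

    # Inside 192.0.0.0/24 and clear of both /32 exceptions: still private.
    assert ipaddress.ip_network('192.0.0.0/29').is_private

    # Exactly one of the exception blocks: no longer private.
    assert not ipaddress.ip_network('192.0.0.9/32').is_private

    # Inside the 2001:3::/32 exception carved out of 2001::/23.
    assert not ipaddress.ip_network('2001:3::/48').is_private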
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2024-03-14-01-38-44.gh-issue-113171.VFnObz.rst
@@ -0,0 +1,9 @@
+Fixed various false positives and false negatives in
+
+* :attr:`ipaddress.IPv4Address.is_private` (see these docs for details)
+* :attr:`ipaddress.IPv4Address.is_global`
+* :attr:`ipaddress.IPv6Address.is_private`
+* :attr:`ipaddress.IPv6Address.is_global`
+
+Also in the corresponding :class:`ipaddress.IPv4Network` and :class:`ipaddress.IPv6Network`
+attributes.
@ -1,238 +0,0 @@
|
||||
From b7431133441a92670132600e5af78b64dd25539b Mon Sep 17 00:00:00 2001
|
||||
From: Seth Michael Larson <seth@python.org>
|
||||
Date: Sat, 31 Aug 2024 17:17:05 -0500
|
||||
Subject: [PATCH] [3.11] gh-121285: Remove backtracking when parsing tarfile
|
||||
headers (GH-121286)
|
||||
|
||||
* Remove backtracking when parsing tarfile headers
|
||||
* Rewrite PAX header parsing to be stricter
|
||||
* Optimize parsing of GNU extended sparse headers v0.0
|
||||
|
||||
(cherry picked from commit 34ddb64d088dd7ccc321f6103d23153256caa5d4)
|
||||
|
||||
Co-authored-by: Seth Michael Larson <seth@python.org>
|
||||
Co-authored-by: Kirill Podoprigora <kirill.bast9@mail.ru>
|
||||
Co-authored-by: Gregory P. Smith <greg@krypto.org>
|
||||
---
|
||||
Lib/tarfile.py | 105 ++++++----
|
||||
Lib/test/test_tarfile.py | 42 ++++
|
||||
Misc/NEWS.d/next/Security/2024-07-02-13-39-20.gh-issue-121285.hrl-yI.rst | 2
|
||||
3 files changed, 111 insertions(+), 38 deletions(-)
|
||||
create mode 100644 Misc/NEWS.d/next/Security/2024-07-02-13-39-20.gh-issue-121285.hrl-yI.rst
|
||||
|
||||
--- a/Lib/tarfile.py
|
||||
+++ b/Lib/tarfile.py
|
||||
@@ -842,6 +842,9 @@ _NAMED_FILTERS = {
|
||||
# Sentinel for replace() defaults, meaning "don't change the attribute"
|
||||
_KEEP = object()
|
||||
|
||||
+# Header length is digits followed by a space.
|
||||
+_header_length_prefix_re = re.compile(br"([0-9]{1,20}) ")
|
||||
+
|
||||
class TarInfo(object):
|
||||
"""Informational class which holds the details about an
|
||||
archive member given by a tar header block.
|
||||
@@ -1411,41 +1414,59 @@ class TarInfo(object):
|
||||
else:
|
||||
pax_headers = tarfile.pax_headers.copy()
|
||||
|
||||
- # Check if the pax header contains a hdrcharset field. This tells us
|
||||
- # the encoding of the path, linkpath, uname and gname fields. Normally,
|
||||
- # these fields are UTF-8 encoded but since POSIX.1-2008 tar
|
||||
- # implementations are allowed to store them as raw binary strings if
|
||||
- # the translation to UTF-8 fails.
|
||||
- match = re.search(br"\d+ hdrcharset=([^\n]+)\n", buf)
|
||||
- if match is not None:
|
||||
- pax_headers["hdrcharset"] = match.group(1).decode("utf-8")
|
||||
-
|
||||
- # For the time being, we don't care about anything other than "BINARY".
|
||||
- # The only other value that is currently allowed by the standard is
|
||||
- # "ISO-IR 10646 2000 UTF-8" in other words UTF-8.
|
||||
- hdrcharset = pax_headers.get("hdrcharset")
|
||||
- if hdrcharset == "BINARY":
|
||||
- encoding = tarfile.encoding
|
||||
- else:
|
||||
- encoding = "utf-8"
|
||||
-
|
||||
# Parse pax header information. A record looks like that:
|
||||
# "%d %s=%s\n" % (length, keyword, value). length is the size
|
||||
# of the complete record including the length field itself and
|
||||
- # the newline. keyword and value are both UTF-8 encoded strings.
|
||||
- regex = re.compile(br"(\d+) ([^=]+)=")
|
||||
+ # the newline.
|
||||
pos = 0
|
||||
- while True:
|
||||
- match = regex.match(buf, pos)
|
||||
- if not match:
|
||||
- break
|
||||
+ encoding = None
|
||||
+ raw_headers = []
|
||||
+ while len(buf) > pos and buf[pos] != 0x00:
|
||||
+ if not (match := _header_length_prefix_re.match(buf, pos)):
|
||||
+ raise InvalidHeaderError("invalid header")
|
||||
+ try:
|
||||
+ length = int(match.group(1))
|
||||
+ except ValueError:
|
||||
+ raise InvalidHeaderError("invalid header")
|
||||
+ # Headers must be at least 5 bytes, shortest being '5 x=\n'.
|
||||
+ # Value is allowed to be empty.
|
||||
+ if length < 5:
|
||||
+ raise InvalidHeaderError("invalid header")
|
||||
+ if pos + length > len(buf):
|
||||
+ raise InvalidHeaderError("invalid header")
|
||||
+
|
||||
+ header_value_end_offset = match.start(1) + length - 1 # Last byte of the header
|
||||
+ keyword_and_value = buf[match.end(1) + 1:header_value_end_offset]
|
||||
+ raw_keyword, equals, raw_value = keyword_and_value.partition(b"=")
|
||||
|
||||
- length, keyword = match.groups()
|
||||
- length = int(length)
|
||||
- if length == 0:
|
||||
+ # Check the framing of the header. The last character must be '\n' (0x0A)
|
||||
+ if not raw_keyword or equals != b"=" or buf[header_value_end_offset] != 0x0A:
|
||||
raise InvalidHeaderError("invalid header")
|
||||
- value = buf[match.end(2) + 1:match.start(1) + length - 1]
|
||||
+ raw_headers.append((length, raw_keyword, raw_value))
|
||||
+
|
||||
+ # Check if the pax header contains a hdrcharset field. This tells us
|
||||
+ # the encoding of the path, linkpath, uname and gname fields. Normally,
|
||||
+ # these fields are UTF-8 encoded but since POSIX.1-2008 tar
|
||||
+ # implementations are allowed to store them as raw binary strings if
|
||||
+ # the translation to UTF-8 fails. For the time being, we don't care about
|
||||
+ # anything other than "BINARY". The only other value that is currently
|
||||
+ # allowed by the standard is "ISO-IR 10646 2000 UTF-8" in other words UTF-8.
|
||||
+ # Note that we only follow the initial 'hdrcharset' setting to preserve
|
||||
+ # the initial behavior of the 'tarfile' module.
|
||||
+ if raw_keyword == b"hdrcharset" and encoding is None:
|
||||
+ if raw_value == b"BINARY":
|
||||
+ encoding = tarfile.encoding
|
||||
+ else: # This branch ensures only the first 'hdrcharset' header is used.
|
||||
+ encoding = "utf-8"
|
||||
|
||||
+ pos += length
|
||||
+
|
||||
+ # If no explicit hdrcharset is set, we use UTF-8 as a default.
|
||||
+ if encoding is None:
|
||||
+ encoding = "utf-8"
|
||||
+
|
||||
+ # After parsing the raw headers we can decode them to text.
|
||||
+ for length, raw_keyword, raw_value in raw_headers:
|
||||
# Normally, we could just use "utf-8" as the encoding and "strict"
|
||||
# as the error handler, but we better not take the risk. For
|
||||
# example, GNU tar <= 1.23 is known to store filenames it cannot
|
||||
@@ -1453,17 +1474,16 @@ class TarInfo(object):
|
||||
# hdrcharset=BINARY header).
|
||||
# We first try the strict standard encoding, and if that fails we
|
||||
# fall back on the user's encoding and error handler.
|
||||
- keyword = self._decode_pax_field(keyword, "utf-8", "utf-8",
|
||||
+ keyword = self._decode_pax_field(raw_keyword, "utf-8", "utf-8",
|
||||
tarfile.errors)
|
||||
if keyword in PAX_NAME_FIELDS:
|
||||
- value = self._decode_pax_field(value, encoding, tarfile.encoding,
|
||||
+ value = self._decode_pax_field(raw_value, encoding, tarfile.encoding,
|
||||
tarfile.errors)
|
||||
else:
|
||||
- value = self._decode_pax_field(value, "utf-8", "utf-8",
|
||||
+ value = self._decode_pax_field(raw_value, "utf-8", "utf-8",
|
||||
tarfile.errors)
|
||||
|
||||
pax_headers[keyword] = value
|
||||
- pos += length
|
||||
|
||||
# Fetch the next header.
|
||||
try:
|
||||
@@ -1478,7 +1498,7 @@ class TarInfo(object):
|
||||
|
||||
elif "GNU.sparse.size" in pax_headers:
|
||||
# GNU extended sparse format version 0.0.
|
||||
- self._proc_gnusparse_00(next, pax_headers, buf)
|
||||
+ self._proc_gnusparse_00(next, raw_headers)
|
||||
|
||||
elif pax_headers.get("GNU.sparse.major") == "1" and pax_headers.get("GNU.sparse.minor") == "0":
|
||||
# GNU extended sparse format version 1.0.
|
||||
@@ -1500,15 +1520,24 @@ class TarInfo(object):
|
||||
|
||||
return next
|
||||
|
||||
- def _proc_gnusparse_00(self, next, pax_headers, buf):
|
||||
+ def _proc_gnusparse_00(self, next, raw_headers):
|
||||
"""Process a GNU tar extended sparse header, version 0.0.
|
||||
"""
|
||||
offsets = []
|
||||
- for match in re.finditer(br"\d+ GNU.sparse.offset=(\d+)\n", buf):
|
||||
- offsets.append(int(match.group(1)))
|
||||
numbytes = []
|
||||
- for match in re.finditer(br"\d+ GNU.sparse.numbytes=(\d+)\n", buf):
|
||||
- numbytes.append(int(match.group(1)))
|
||||
+ for _, keyword, value in raw_headers:
|
||||
+ if keyword == b"GNU.sparse.offset":
|
||||
+ try:
|
||||
+ offsets.append(int(value.decode()))
|
||||
+ except ValueError:
|
||||
+ raise InvalidHeaderError("invalid header")
|
||||
+
|
||||
+ elif keyword == b"GNU.sparse.numbytes":
|
||||
+ try:
|
||||
+ numbytes.append(int(value.decode()))
|
||||
+ except ValueError:
|
||||
+ raise InvalidHeaderError("invalid header")
|
||||
+
|
||||
next.sparse = list(zip(offsets, numbytes))
|
||||
|
||||
def _proc_gnusparse_01(self, next, pax_headers):
|
||||
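The framing enforced by the rewritten parser is easiest to see on the ``b"11 foo=bar\n"`` record used by the new test below: the leading decimal is the length of the whole record, counting the length field, the separating space, the ``keyword=value`` pair and the trailing newline. The helper below is a hypothetical standalone restatement of that check, not code from the patch::

    import re

    # Same pattern the patch adds to Lib/tarfile.py.
    _header_length_prefix_re = re.compile(br"([0-9]{1,20}) ")

    def split_pax_record(buf, pos=0):
        """Return (keyword, value, next_pos) for one PAX record."""
        match = _header_length_prefix_re.match(buf, pos)
        if not match:
            raise ValueError("invalid header")
        length = int(match.group(1))
        # Shortest legal record is b"5 x=\n"; it must also fit in the buffer.
        if length < 5 or pos + length > len(buf):
            raise ValueError("invalid header")
        end = match.start(1) + length - 1      # index of the trailing newline
        keyword, equals, value = buf[match.end(1) + 1:end].partition(b"=")
        if not keyword or equals != b"=" or buf[end] != 0x0A:
            raise ValueError("invalid header")
        return keyword, value, pos + length

    assert split_pax_record(b"11 foo=bar\n") == (b"foo", b"bar", 11)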
--- a/Lib/test/test_tarfile.py
|
||||
+++ b/Lib/test/test_tarfile.py
|
||||
@@ -1184,6 +1184,48 @@ class PaxReadTest(LongnameTest, ReadTest
|
||||
finally:
|
||||
tar.close()
|
||||
|
||||
+ def test_pax_header_bad_formats(self):
|
||||
+ # The fields from the pax header have priority over the
|
||||
+ # TarInfo.
|
||||
+ pax_header_replacements = (
|
||||
+ b" foo=bar\n",
|
||||
+ b"0 \n",
|
||||
+ b"1 \n",
|
||||
+ b"2 \n",
|
||||
+ b"3 =\n",
|
||||
+ b"4 =a\n",
|
||||
+ b"1000000 foo=bar\n",
|
||||
+ b"0 foo=bar\n",
|
||||
+ b"-12 foo=bar\n",
|
||||
+ b"000000000000000000000000036 foo=bar\n",
|
||||
+ )
|
||||
+ pax_headers = {"foo": "bar"}
|
||||
+
|
||||
+ for replacement in pax_header_replacements:
|
||||
+ with self.subTest(header=replacement):
|
||||
+ tar = tarfile.open(tmpname, "w", format=tarfile.PAX_FORMAT,
|
||||
+ encoding="iso8859-1")
|
||||
+ try:
|
||||
+ t = tarfile.TarInfo()
|
||||
+ t.name = "pax" # non-ASCII
|
||||
+ t.uid = 1
|
||||
+ t.pax_headers = pax_headers
|
||||
+ tar.addfile(t)
|
||||
+ finally:
|
||||
+ tar.close()
|
||||
+
|
||||
+ with open(tmpname, "rb") as f:
|
||||
+ data = f.read()
|
||||
+ self.assertIn(b"11 foo=bar\n", data)
|
||||
+ data = data.replace(b"11 foo=bar\n", replacement)
|
||||
+
|
||||
+ with open(tmpname, "wb") as f:
|
||||
+ f.truncate()
|
||||
+ f.write(data)
|
||||
+
|
||||
+ with self.assertRaisesRegex(tarfile.ReadError, r"method tar: ReadError\('invalid header'\)"):
|
||||
+ tarfile.open(tmpname, encoding="iso8859-1")
|
||||
+
|
||||
|
||||
class WriteTestBase(TarTest):
|
||||
# Put all write tests in here that are supposed to be tested
|
||||
--- /dev/null
+++ b/Misc/NEWS.d/next/Security/2024-07-02-13-39-20.gh-issue-121285.hrl-yI.rst
@@ -0,0 +1,2 @@
+Remove backtracking from tarfile header parsing for ``hdrcharset``, PAX, and
+GNU sparse headers.
@ -1,348 +0,0 @@
|
||||
From f9ddc53ea850fb02d640a9b3263756d43fb6d868 Mon Sep 17 00:00:00 2001
|
||||
From: Petr Viktorin <encukou@gmail.com>
|
||||
Date: Wed, 31 Jul 2024 00:19:48 +0200
|
||||
Subject: [PATCH] [3.11] gh-121650: Encode newlines in headers, and verify
|
||||
headers are sound (GH-122233)
|
||||
|
||||
GH-GH- Encode header parts that contain newlines
|
||||
|
||||
Per RFC 2047:
|
||||
|
||||
> [...] these encoding schemes allow the
|
||||
> encoding of arbitrary octet values, mail readers that implement this
|
||||
> decoding should also ensure that display of the decoded data on the
|
||||
> recipient's terminal will not cause unwanted side-effects
|
||||
|
||||
It seems that the "quoted-word" scheme is a valid way to include
|
||||
a newline character in a header value, just like we already allow
|
||||
undecodable bytes or control characters.
|
||||
They do need to be properly quoted when serialized to text, though.
|
||||
|
||||
GH-GH- Verify that email headers are well-formed
|
||||
|
||||
This should fail for custom fold() implementations that aren't careful
|
||||
about newlines.
|
||||
|
||||
(cherry picked from commit 097633981879b3c9de9a1dd120d3aa585ecc2384)
|
||||
|
||||
Co-authored-by: Petr Viktorin <encukou@gmail.com>
|
||||
Co-authored-by: Bas Bloemsaat <bas@bloemsaat.org>
|
||||
Co-authored-by: Serhiy Storchaka <storchaka@gmail.com>
|
||||
---
|
||||
Doc/library/email.errors.rst | 7 +
|
||||
Doc/library/email.policy.rst | 18 ++
|
||||
Doc/whatsnew/3.11.rst | 13 ++
|
||||
Lib/email/_header_value_parser.py | 12 +
|
||||
Lib/email/_policybase.py | 8 +
|
||||
Lib/email/errors.py | 4
|
||||
Lib/email/generator.py | 13 +-
|
||||
Lib/test/test_email/test_generator.py | 62 ++++++++++
|
||||
Lib/test/test_email/test_policy.py | 26 ++++
|
||||
Misc/NEWS.d/next/Library/2024-07-27-16-10-41.gh-issue-121650.nf6oc9.rst | 5
|
||||
10 files changed, 164 insertions(+), 4 deletions(-)
|
||||
create mode 100644 Misc/NEWS.d/next/Library/2024-07-27-16-10-41.gh-issue-121650.nf6oc9.rst
|
||||
|
||||
--- a/Doc/library/email.errors.rst
|
||||
+++ b/Doc/library/email.errors.rst
|
||||
@@ -58,6 +58,13 @@ The following exception classes are defi
|
||||
:class:`~email.mime.nonmultipart.MIMENonMultipart` (e.g.
|
||||
:class:`~email.mime.image.MIMEImage`).
|
||||
|
||||
+
|
||||
+.. exception:: HeaderWriteError()
|
||||
+
|
||||
+ Raised when an error occurs when the :mod:`~email.generator` outputs
|
||||
+ headers.
|
||||
+
|
||||
+
|
||||
.. exception:: MessageDefect()
|
||||
|
||||
This is the base class for all defects found when parsing email messages.
|
||||
--- a/Doc/library/email.policy.rst
|
||||
+++ b/Doc/library/email.policy.rst
|
||||
@@ -228,6 +228,24 @@ added matters. To illustrate::
|
||||
|
||||
.. versionadded:: 3.6
|
||||
|
||||
+
|
||||
+ .. attribute:: verify_generated_headers
|
||||
+
|
||||
+ If ``True`` (the default), the generator will raise
|
||||
+ :exc:`~email.errors.HeaderWriteError` instead of writing a header
|
||||
+ that is improperly folded or delimited, such that it would
|
||||
+ be parsed as multiple headers or joined with adjacent data.
|
||||
+ Such headers can be generated by custom header classes or bugs
|
||||
+ in the ``email`` module.
|
||||
+
|
||||
+ As it's a security feature, this defaults to ``True`` even in the
|
||||
+ :class:`~email.policy.Compat32` policy.
|
||||
+ For backwards compatible, but unsafe, behavior, it must be set to
|
||||
+ ``False`` explicitly.
|
||||
+
|
||||
+ .. versionadded:: 3.11.10
|
||||
+
|
||||
+
|
||||
The following :class:`Policy` method is intended to be called by code using
|
||||
the email library to create policy instances with custom settings:
|
||||
|
||||
--- a/Doc/whatsnew/3.11.rst
|
||||
+++ b/Doc/whatsnew/3.11.rst
|
||||
@@ -2728,6 +2728,7 @@ OpenSSL
|
||||
|
||||
.. _libb2: https://www.blake2.net/
|
||||
|
||||
+
|
||||
Notable changes in 3.11.10
|
||||
==========================
|
||||
|
||||
@@ -2736,3 +2737,15 @@ ipaddress
|
||||
|
||||
* Fixed ``is_global`` and ``is_private`` behavior in ``IPv4Address``,
|
||||
``IPv6Address``, ``IPv4Network`` and ``IPv6Network``.
|
||||
+
|
||||
+email
|
||||
+-----
|
||||
+
|
||||
+* Headers with embedded newlines are now quoted on output.
|
||||
+
|
||||
+ The :mod:`~email.generator` will now refuse to serialize (write) headers
|
||||
+ that are improperly folded or delimited, such that they would be parsed as
|
||||
+ multiple headers or joined with adjacent data.
|
||||
+ If you need to turn this safety feature off,
|
||||
+ set :attr:`~email.policy.Policy.verify_generated_headers`.
|
||||
+ (Contributed by Bas Bloemsaat and Petr Viktorin in :gh:`121650`.)
|
||||
--- a/Lib/email/_header_value_parser.py
|
||||
+++ b/Lib/email/_header_value_parser.py
|
||||
@@ -92,6 +92,8 @@ TOKEN_ENDS = TSPECIALS | WSP
|
||||
ASPECIALS = TSPECIALS | set("*'%")
|
||||
ATTRIBUTE_ENDS = ASPECIALS | WSP
|
||||
EXTENDED_ATTRIBUTE_ENDS = ATTRIBUTE_ENDS - set('%')
|
||||
+NLSET = {'\n', '\r'}
|
||||
+SPECIALSNL = SPECIALS | NLSET
|
||||
|
||||
def quote_string(value):
|
||||
return '"'+str(value).replace('\\', '\\\\').replace('"', r'\"')+'"'
|
||||
@@ -2779,9 +2781,13 @@ def _refold_parse_tree(parse_tree, *, po
|
||||
wrap_as_ew_blocked -= 1
|
||||
continue
|
||||
tstr = str(part)
|
||||
- if part.token_type == 'ptext' and set(tstr) & SPECIALS:
|
||||
- # Encode if tstr contains special characters.
|
||||
- want_encoding = True
|
||||
+ if not want_encoding:
|
||||
+ if part.token_type == 'ptext':
|
||||
+ # Encode if tstr contains special characters.
|
||||
+ want_encoding = not SPECIALSNL.isdisjoint(tstr)
|
||||
+ else:
|
||||
+ # Encode if tstr contains newlines.
|
||||
+ want_encoding = not NLSET.isdisjoint(tstr)
|
||||
try:
|
||||
tstr.encode(encoding)
|
||||
charset = encoding
|
||||
--- a/Lib/email/_policybase.py
|
||||
+++ b/Lib/email/_policybase.py
|
||||
@@ -157,6 +157,13 @@ class Policy(_PolicyBase, metaclass=abc.
|
||||
message_factory -- the class to use to create new message objects.
|
||||
If the value is None, the default is Message.
|
||||
|
||||
+ verify_generated_headers
|
||||
+ -- if true, the generator verifies that each header
|
||||
+ they are properly folded, so that a parser won't
|
||||
+ treat it as multiple headers, start-of-body, or
|
||||
+ part of another header.
|
||||
+ This is a check against custom Header & fold()
|
||||
+ implementations.
|
||||
"""
|
||||
|
||||
raise_on_defect = False
|
||||
@@ -165,6 +172,7 @@ class Policy(_PolicyBase, metaclass=abc.
|
||||
max_line_length = 78
|
||||
mangle_from_ = False
|
||||
message_factory = None
|
||||
+ verify_generated_headers = True
|
||||
|
||||
def handle_defect(self, obj, defect):
|
||||
"""Based on policy, either raise defect or call register_defect.
|
||||
--- a/Lib/email/errors.py
|
||||
+++ b/Lib/email/errors.py
|
||||
@@ -29,6 +29,10 @@ class CharsetError(MessageError):
|
||||
"""An illegal charset was given."""
|
||||
|
||||
|
||||
+class HeaderWriteError(MessageError):
|
||||
+ """Error while writing headers."""
|
||||
+
|
||||
+
|
||||
# These are parsing defects which the parser was able to work around.
|
||||
class MessageDefect(ValueError):
|
||||
"""Base class for a message defect."""
|
||||
--- a/Lib/email/generator.py
|
||||
+++ b/Lib/email/generator.py
|
||||
@@ -14,12 +14,14 @@ import random
|
||||
from copy import deepcopy
|
||||
from io import StringIO, BytesIO
|
||||
from email.utils import _has_surrogates
|
||||
+from email.errors import HeaderWriteError
|
||||
|
||||
UNDERSCORE = '_'
|
||||
NL = '\n' # XXX: no longer used by the code below.
|
||||
|
||||
NLCRE = re.compile(r'\r\n|\r|\n')
|
||||
fcre = re.compile(r'^From ', re.MULTILINE)
|
||||
+NEWLINE_WITHOUT_FWSP = re.compile(r'\r\n[^ \t]|\r[^ \n\t]|\n[^ \t]')
|
||||
|
||||
|
||||
class Generator:
|
||||
@@ -222,7 +224,16 @@ class Generator:
|
||||
|
||||
def _write_headers(self, msg):
|
||||
for h, v in msg.raw_items():
|
||||
- self.write(self.policy.fold(h, v))
|
||||
+ folded = self.policy.fold(h, v)
|
||||
+ if self.policy.verify_generated_headers:
|
||||
+ linesep = self.policy.linesep
|
||||
+ if not folded.endswith(self.policy.linesep):
|
||||
+ raise HeaderWriteError(
|
||||
+ f'folded header does not end with {linesep!r}: {folded!r}')
|
||||
+ if NEWLINE_WITHOUT_FWSP.search(folded.removesuffix(linesep)):
|
||||
+ raise HeaderWriteError(
|
||||
+ f'folded header contains newline: {folded!r}')
|
||||
+ self.write(folded)
|
||||
# A blank line always separates headers from body
|
||||
self.write(self._NL)
|
||||
|
||||
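The check added to ``_write_headers`` above is what the new ``test_verify_generated_headers`` exercises; condensed into a standalone snippet, a header class whose ``fold()`` smuggles a newline through is now rejected under the default policy (a minimal sketch, not part of the patch)::

    import email.errors
    from email import message_from_string, policy

    class LiteralHeader(str):
        """A header whose fold() returns itself verbatim, newlines included."""
        name = 'Header'
        def fold(self, **kwargs):
            return self

    msg = message_from_string("Header: Value\r\n\r\nBody", policy=policy.default)
    del msg['Header']
    msg['Header'] = LiteralHeader('Value\r\nBad: Injection\r\n')

    try:
        msg.as_string()
    except email.errors.HeaderWriteError:
        print('injected header refused')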
--- a/Lib/test/test_email/test_generator.py
|
||||
+++ b/Lib/test/test_email/test_generator.py
|
||||
@@ -6,6 +6,7 @@ from email.message import EmailMessage
|
||||
from email.generator import Generator, BytesGenerator
|
||||
from email.headerregistry import Address
|
||||
from email import policy
|
||||
+import email.errors
|
||||
from test.test_email import TestEmailBase, parameterize
|
||||
|
||||
|
||||
@@ -216,6 +217,44 @@ class TestGeneratorBase:
|
||||
g.flatten(msg)
|
||||
self.assertEqual(s.getvalue(), self.typ(expected))
|
||||
|
||||
+ def test_keep_encoded_newlines(self):
|
||||
+ msg = self.msgmaker(self.typ(textwrap.dedent("""\
|
||||
+ To: nobody
|
||||
+ Subject: Bad subject=?UTF-8?Q?=0A?=Bcc: injection@example.com
|
||||
+
|
||||
+ None
|
||||
+ """)))
|
||||
+ expected = textwrap.dedent("""\
|
||||
+ To: nobody
|
||||
+ Subject: Bad subject=?UTF-8?Q?=0A?=Bcc: injection@example.com
|
||||
+
|
||||
+ None
|
||||
+ """)
|
||||
+ s = self.ioclass()
|
||||
+ g = self.genclass(s, policy=self.policy.clone(max_line_length=80))
|
||||
+ g.flatten(msg)
|
||||
+ self.assertEqual(s.getvalue(), self.typ(expected))
|
||||
+
|
||||
+ def test_keep_long_encoded_newlines(self):
|
||||
+ msg = self.msgmaker(self.typ(textwrap.dedent("""\
|
||||
+ To: nobody
|
||||
+ Subject: Bad subject=?UTF-8?Q?=0A?=Bcc: injection@example.com
|
||||
+
|
||||
+ None
|
||||
+ """)))
|
||||
+ expected = textwrap.dedent("""\
|
||||
+ To: nobody
|
||||
+ Subject: Bad subject
|
||||
+ =?utf-8?q?=0A?=Bcc:
|
||||
+ injection@example.com
|
||||
+
|
||||
+ None
|
||||
+ """)
|
||||
+ s = self.ioclass()
|
||||
+ g = self.genclass(s, policy=self.policy.clone(max_line_length=30))
|
||||
+ g.flatten(msg)
|
||||
+ self.assertEqual(s.getvalue(), self.typ(expected))
|
||||
+
|
||||
|
||||
class TestGenerator(TestGeneratorBase, TestEmailBase):
|
||||
|
||||
@@ -224,6 +263,29 @@ class TestGenerator(TestGeneratorBase, T
|
||||
ioclass = io.StringIO
|
||||
typ = str
|
||||
|
||||
+ def test_verify_generated_headers(self):
|
||||
+ """gh-121650: by default the generator prevents header injection"""
|
||||
+ class LiteralHeader(str):
|
||||
+ name = 'Header'
|
||||
+ def fold(self, **kwargs):
|
||||
+ return self
|
||||
+
|
||||
+ for text in (
|
||||
+ 'Value\r\nBad Injection\r\n',
|
||||
+ 'NoNewLine'
|
||||
+ ):
|
||||
+ with self.subTest(text=text):
|
||||
+ message = message_from_string(
|
||||
+ "Header: Value\r\n\r\nBody",
|
||||
+ policy=self.policy,
|
||||
+ )
|
||||
+
|
||||
+ del message['Header']
|
||||
+ message['Header'] = LiteralHeader(text)
|
||||
+
|
||||
+ with self.assertRaises(email.errors.HeaderWriteError):
|
||||
+ message.as_string()
|
||||
+
|
||||
|
||||
class TestBytesGenerator(TestGeneratorBase, TestEmailBase):
|
||||
|
||||
--- a/Lib/test/test_email/test_policy.py
|
||||
+++ b/Lib/test/test_email/test_policy.py
|
||||
@@ -26,6 +26,7 @@ class PolicyAPITests(unittest.TestCase):
|
||||
'raise_on_defect': False,
|
||||
'mangle_from_': True,
|
||||
'message_factory': None,
|
||||
+ 'verify_generated_headers': True,
|
||||
}
|
||||
# These default values are the ones set on email.policy.default.
|
||||
# If any of these defaults change, the docs must be updated.
|
||||
@@ -294,6 +295,31 @@ class PolicyAPITests(unittest.TestCase):
|
||||
with self.assertRaises(email.errors.HeaderParseError):
|
||||
policy.fold("Subject", subject)
|
||||
|
||||
+ def test_verify_generated_headers(self):
|
||||
+ """Turning protection off allows header injection"""
|
||||
+ policy = email.policy.default.clone(verify_generated_headers=False)
|
||||
+ for text in (
|
||||
+ 'Header: Value\r\nBad: Injection\r\n',
|
||||
+ 'Header: NoNewLine'
|
||||
+ ):
|
||||
+ with self.subTest(text=text):
|
||||
+ message = email.message_from_string(
|
||||
+ "Header: Value\r\n\r\nBody",
|
||||
+ policy=policy,
|
||||
+ )
|
||||
+ class LiteralHeader(str):
|
||||
+ name = 'Header'
|
||||
+ def fold(self, **kwargs):
|
||||
+ return self
|
||||
+
|
||||
+ del message['Header']
|
||||
+ message['Header'] = LiteralHeader(text)
|
||||
+
|
||||
+ self.assertEqual(
|
||||
+ message.as_string(),
|
||||
+ f"{text}\nBody",
|
||||
+ )
|
||||
+
|
||||
# XXX: Need subclassing tests.
|
||||
# For adding subclassed objects, make sure the usual rules apply (subclass
|
||||
# wins), but that the order still works (right overrides left).
|
||||
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2024-07-27-16-10-41.gh-issue-121650.nf6oc9.rst
@ -0,0 +1,5 @@
+:mod:`email` headers with embedded newlines are now quoted on output. The
+:mod:`~email.generator` will now refuse to serialize (write) headers that
+are unsafely folded or delimited; see
+:attr:`~email.policy.Policy.verify_generated_headers`. (Contributed by Bas
+Bloemsaat and Petr Viktorin in :gh:`121650`.)
@ -1,125 +0,0 @@
From dcfd909737995b948b99810a28205835a6c2848e Mon Sep 17 00:00:00 2001
From: Serhiy Storchaka <storchaka@gmail.com>
Date: Sat, 17 Aug 2024 16:30:52 +0300
Subject: [PATCH] gh-123067: Fix quadratic complexity in parsing "-quoted
cookie values with backslashes (GH-123075)

This fixes CVE-2024-7592.
(cherry picked from commit 44e458357fca05ca0ae2658d62c8c595b048b5ef)

Co-authored-by: Serhiy Storchaka <storchaka@gmail.com>
---
Lib/http/cookies.py | 34 ++------
Lib/test/test_http_cookies.py | 38 ++++++++++
Misc/NEWS.d/next/Library/2024-08-16-19-13-21.gh-issue-123067.Nx9O4R.rst | 1
3 files changed, 47 insertions(+), 26 deletions(-)
create mode 100644 Misc/NEWS.d/next/Library/2024-08-16-19-13-21.gh-issue-123067.Nx9O4R.rst

--- a/Lib/http/cookies.py
|
||||
+++ b/Lib/http/cookies.py
|
||||
@@ -184,8 +184,13 @@ def _quote(str):
|
||||
return '"' + str.translate(_Translator) + '"'
|
||||
|
||||
|
||||
-_OctalPatt = re.compile(r"\\[0-3][0-7][0-7]")
|
||||
-_QuotePatt = re.compile(r"[\\].")
|
||||
+_unquote_sub = re.compile(r'\\(?:([0-3][0-7][0-7])|(.))').sub
|
||||
+
|
||||
+def _unquote_replace(m):
|
||||
+ if m[1]:
|
||||
+ return chr(int(m[1], 8))
|
||||
+ else:
|
||||
+ return m[2]
|
||||
|
||||
def _unquote(str):
|
||||
# If there aren't any doublequotes,
|
||||
@@ -205,30 +210,7 @@ def _unquote(str):
|
||||
# \012 --> \n
|
||||
# \" --> "
|
||||
#
|
||||
- i = 0
|
||||
- n = len(str)
|
||||
- res = []
|
||||
- while 0 <= i < n:
|
||||
- o_match = _OctalPatt.search(str, i)
|
||||
- q_match = _QuotePatt.search(str, i)
|
||||
- if not o_match and not q_match: # Neither matched
|
||||
- res.append(str[i:])
|
||||
- break
|
||||
- # else:
|
||||
- j = k = -1
|
||||
- if o_match:
|
||||
- j = o_match.start(0)
|
||||
- if q_match:
|
||||
- k = q_match.start(0)
|
||||
- if q_match and (not o_match or k < j): # QuotePatt matched
|
||||
- res.append(str[i:k])
|
||||
- res.append(str[k+1])
|
||||
- i = k + 2
|
||||
- else: # OctalPatt matched
|
||||
- res.append(str[i:j])
|
||||
- res.append(chr(int(str[j+1:j+4], 8)))
|
||||
- i = j + 4
|
||||
- return _nulljoin(res)
|
||||
+ return _unquote_sub(_unquote_replace, str)
|
||||
|
||||
# The _getdate() routine is used to set the expiration time in the cookie's HTTP
|
||||
# header. By default, _getdate() returns the current time in the appropriate
|
||||
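The single regex substitution above does the work of the old ``_OctalPatt``/``_QuotePatt`` scanning loop in one linear pass. A standalone sketch of the same idea, with the surrounding quote handling reduced to the minimum (it mirrors the helper added above rather than importing it)::

    import re

    _unquote_sub = re.compile(r'\\(?:([0-3][0-7][0-7])|(.))').sub

    def _unquote_replace(m):
        # Octal escape (\042) or a single escaped character (\").
        if m[1]:
            return chr(int(m[1], 8))
        return m[2]

    def unquote(value):
        if len(value) > 1 and value[0] == '"' and value[-1] == '"':
            value = value[1:-1]
        return _unquote_sub(_unquote_replace, value)

    assert unquote(r'"b=\042"') == 'b="'    # octal escape for '"'
    assert unquote(r'"b=\\"') == 'b=\\'     # escaped backslash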
--- a/Lib/test/test_http_cookies.py
|
||||
+++ b/Lib/test/test_http_cookies.py
|
||||
@@ -5,6 +5,7 @@ import unittest
|
||||
import doctest
|
||||
from http import cookies
|
||||
import pickle
|
||||
+from test import support
|
||||
|
||||
|
||||
class CookieTests(unittest.TestCase):
|
||||
@@ -58,6 +59,43 @@ class CookieTests(unittest.TestCase):
|
||||
for k, v in sorted(case['dict'].items()):
|
||||
self.assertEqual(C[k].value, v)
|
||||
|
||||
+ def test_unquote(self):
|
||||
+ cases = [
|
||||
+ (r'a="b=\""', 'b="'),
|
||||
+ (r'a="b=\\"', 'b=\\'),
|
||||
+ (r'a="b=\="', 'b=='),
|
||||
+ (r'a="b=\n"', 'b=n'),
|
||||
+ (r'a="b=\042"', 'b="'),
|
||||
+ (r'a="b=\134"', 'b=\\'),
|
||||
+ (r'a="b=\377"', 'b=\xff'),
|
||||
+ (r'a="b=\400"', 'b=400'),
|
||||
+ (r'a="b=\42"', 'b=42'),
|
||||
+ (r'a="b=\\042"', 'b=\\042'),
|
||||
+ (r'a="b=\\134"', 'b=\\134'),
|
||||
+ (r'a="b=\\\""', 'b=\\"'),
|
||||
+ (r'a="b=\\\042"', 'b=\\"'),
|
||||
+ (r'a="b=\134\""', 'b=\\"'),
|
||||
+ (r'a="b=\134\042"', 'b=\\"'),
|
||||
+ ]
|
||||
+ for encoded, decoded in cases:
|
||||
+ with self.subTest(encoded):
|
||||
+ C = cookies.SimpleCookie()
|
||||
+ C.load(encoded)
|
||||
+ self.assertEqual(C['a'].value, decoded)
|
||||
+
|
||||
+ @support.requires_resource('cpu')
|
||||
+ def test_unquote_large(self):
|
||||
+ n = 10**6
|
||||
+ for encoded in r'\\', r'\134':
|
||||
+ with self.subTest(encoded):
|
||||
+ data = 'a="b=' + encoded*n + ';"'
|
||||
+ C = cookies.SimpleCookie()
|
||||
+ C.load(data)
|
||||
+ value = C['a'].value
|
||||
+ self.assertEqual(value[:3], 'b=\\')
|
||||
+ self.assertEqual(value[-2:], '\\;')
|
||||
+ self.assertEqual(len(value), n + 3)
|
||||
+
|
||||
def test_load(self):
|
||||
C = cookies.SimpleCookie()
|
||||
C.load('Customer="WILE_E_COYOTE"; Version=1; Path=/acme')
|
||||
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2024-08-16-19-13-21.gh-issue-123067.Nx9O4R.rst
@ -0,0 +1 @@
+Fix quadratic complexity in parsing ``"``-quoted cookie values with backslashes by :mod:`http.cookies`.
@ -1,122 +0,0 @@
From 0d480f2766db5313cf311c4f7ec3fd6f9e09615f Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" <jaraco@jaraco.com>
Date: Sun, 11 Aug 2024 19:48:50 -0400
Subject: [PATCH 1/2] gh-122905: Sanitize names in zipfile.Path. (#122906)

Ported from zipp 3.19.1; ref jaraco/zipp#119.

(cherry picked from commit 9cd03263100ddb1657826cc4a71470786cab3932)
---
Lib/test/test_zipfile.py | 17 ++
Lib/zipfile.py | 61 +++++++++-
Misc/NEWS.d/next/Library/2024-08-11-14-08-04.gh-issue-122905.7tDsxA.rst | 1
3 files changed, 78 insertions(+), 1 deletion(-)
create mode 100644 Misc/NEWS.d/next/Library/2024-08-11-14-08-04.gh-issue-122905.7tDsxA.rst

--- a/Lib/test/test_zipfile.py
|
||||
+++ b/Lib/test/test_zipfile.py
|
||||
@@ -3660,6 +3660,23 @@ with zipfile.ZipFile(io.BytesIO(), "w")
|
||||
zipfile.Path(zf)
|
||||
zf.extractall(source_path.parent)
|
||||
|
||||
+ def test_malformed_paths(self):
|
||||
+ """
|
||||
+ Path should handle malformed paths.
|
||||
+ """
|
||||
+ data = io.BytesIO()
|
||||
+ zf = zipfile.ZipFile(data, "w")
|
||||
+ zf.writestr("/one-slash.txt", b"content")
|
||||
+ zf.writestr("//two-slash.txt", b"content")
|
||||
+ zf.writestr("../parent.txt", b"content")
|
||||
+ zf.filename = ''
|
||||
+ root = zipfile.Path(zf)
|
||||
+ assert list(map(str, root.iterdir())) == [
|
||||
+ 'one-slash.txt',
|
||||
+ 'two-slash.txt',
|
||||
+ 'parent.txt',
|
||||
+ ]
|
||||
+
|
||||
|
||||
class EncodedMetadataTests(unittest.TestCase):
|
||||
file_names = ['\u4e00', '\u4e8c', '\u4e09'] # Han 'one', 'two', 'three'
|
||||
--- a/Lib/zipfile.py
|
||||
+++ b/Lib/zipfile.py
|
||||
@@ -9,6 +9,7 @@ import io
|
||||
import itertools
|
||||
import os
|
||||
import posixpath
|
||||
+import re
|
||||
import shutil
|
||||
import stat
|
||||
import struct
|
||||
@@ -2245,7 +2246,65 @@ def _difference(minuend, subtrahend):
|
||||
return itertools.filterfalse(set(subtrahend).__contains__, minuend)
|
||||
|
||||
|
||||
-class CompleteDirs(ZipFile):
|
||||
+class SanitizedNames:
|
||||
+ """
|
||||
+ ZipFile mix-in to ensure names are sanitized.
|
||||
+ """
|
||||
+
|
||||
+ def namelist(self):
|
||||
+ return list(map(self._sanitize, super().namelist()))
|
||||
+
|
||||
+ @staticmethod
|
||||
+ def _sanitize(name):
|
||||
+ r"""
|
||||
+ Ensure a relative path with posix separators and no dot names.
|
||||
+ Modeled after
|
||||
+ https://github.com/python/cpython/blob/bcc1be39cb1d04ad9fc0bd1b9193d3972835a57c/Lib/zipfile/__init__.py#L1799-L1813
|
||||
+ but provides consistent cross-platform behavior.
|
||||
+ >>> san = SanitizedNames._sanitize
|
||||
+ >>> san('/foo/bar')
|
||||
+ 'foo/bar'
|
||||
+ >>> san('//foo.txt')
|
||||
+ 'foo.txt'
|
||||
+ >>> san('foo/.././bar.txt')
|
||||
+ 'foo/bar.txt'
|
||||
+ >>> san('foo../.bar.txt')
|
||||
+ 'foo../.bar.txt'
|
||||
+ >>> san('\\foo\\bar.txt')
|
||||
+ 'foo/bar.txt'
|
||||
+ >>> san('D:\\foo.txt')
|
||||
+ 'D/foo.txt'
|
||||
+ >>> san('\\\\server\\share\\file.txt')
|
||||
+ 'server/share/file.txt'
|
||||
+ >>> san('\\\\?\\GLOBALROOT\\Volume3')
|
||||
+ '?/GLOBALROOT/Volume3'
|
||||
+ >>> san('\\\\.\\PhysicalDrive1\\root')
|
||||
+ 'PhysicalDrive1/root'
|
||||
+ Retain any trailing slash.
|
||||
+ >>> san('abc/')
|
||||
+ 'abc/'
|
||||
+ Raises a ValueError if the result is empty.
|
||||
+ >>> san('../..')
|
||||
+ Traceback (most recent call last):
|
||||
+ ...
|
||||
+ ValueError: Empty filename
|
||||
+ """
|
||||
+
|
||||
+ def allowed(part):
|
||||
+ return part and part not in {'..', '.'}
|
||||
+
|
||||
+ # Remove the drive letter.
|
||||
+ # Don't use ntpath.splitdrive, because that also strips UNC paths
|
||||
+ bare = re.sub('^([A-Z]):', r'\1', name, flags=re.IGNORECASE)
|
||||
+ clean = bare.replace('\\', '/')
|
||||
+ parts = clean.split('/')
|
||||
+ joined = '/'.join(filter(allowed, parts))
|
||||
+ if not joined:
|
||||
+ raise ValueError("Empty filename")
|
||||
+ return joined + '/' * name.endswith('/')
|
||||
+
|
||||
+
|
||||
+class CompleteDirs(SanitizedNames, ZipFile):
|
||||
"""
|
||||
A ZipFile subclass that ensures that implied directories
|
||||
are always included in the namelist.
|
||||
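In practice the mix-in means hostile member names no longer leak out of :class:`zipfile.Path`; the snippet below condenses the new ``test_malformed_paths`` case into a runnable form (a sketch assuming the patched zipfile, not part of the patch)::

    import io
    import zipfile

    data = io.BytesIO()
    zf = zipfile.ZipFile(data, "w")
    zf.writestr("/one-slash.txt", b"content")
    zf.writestr("//two-slash.txt", b"content")
    zf.writestr("../parent.txt", b"content")
    zf.filename = ''

    root = zipfile.Path(zf)
    # Leading slashes and ".." components are stripped from the listing.
    assert list(map(str, root.iterdir())) == [
        'one-slash.txt', 'two-slash.txt', 'parent.txt']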
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2024-08-11-14-08-04.gh-issue-122905.7tDsxA.rst
@ -0,0 +1 @@
+:class:`zipfile.Path` objects now sanitize names from the zipfile.
@ -60,3 +60,156 @@ Index: Python-3.11.8/Lib/site.py
|
||||
for sitedir in getsitepackages(prefixes):
|
||||
if os.path.isdir(sitedir):
|
||||
addsitedir(sitedir, known_paths)
|
||||
|
||||
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
|
||||
From: =?UTF-8?q?Miro=20Hron=C4=8Dok?= <miro@hroncok.cz>
|
||||
Date: Mon, 15 Feb 2021 12:19:27 +0100
|
||||
Subject: [PATCH] 00251: Change user install location
|
||||
MIME-Version: 1.0
|
||||
Content-Type: text/plain; charset=UTF-8
|
||||
Content-Transfer-Encoding: 8bit
|
||||
|
||||
Set values of base and platbase in sysconfig from /usr
|
||||
to /usr/local when RPM build is not detected
|
||||
to make pip and similar tools install into separate location.
|
||||
|
||||
Fedora Change: https://fedoraproject.org/wiki/Changes/Making_sudo_pip_safe
|
||||
Downstream only.
|
||||
|
||||
We've tried to rework in Fedora 36/Python 3.10 to follow https://bugs.python.org/issue43976
|
||||
but we have identified serious problems with that approach,
|
||||
see https://bugzilla.redhat.com/2026979 or https://bugzilla.redhat.com/2097183
|
||||
|
||||
pypa/distutils integration: https://github.com/pypa/distutils/pull/70
|
||||
|
||||
Co-authored-by: Petr Viktorin <encukou@gmail.com>
|
||||
Co-authored-by: Miro Hrončok <miro@hroncok.cz>
|
||||
Co-authored-by: Michal Cyprian <m.cyprian@gmail.com>
|
||||
Co-authored-by: Lumír Balhar <frenzy.madness@gmail.com>
|
||||
---
|
||||
Lib/site.py | 9 ++++++-
|
||||
Lib/sysconfig.py | 49 +++++++++++++++++++++++++++++++++++++-
|
||||
Lib/test/test_sysconfig.py | 17 +++++++++++--
|
||||
3 files changed, 71 insertions(+), 4 deletions(-)
|
||||
|
||||
Index: Python-3.11.9/Lib/sysconfig.py
|
||||
===================================================================
|
||||
--- Python-3.11.9.orig/Lib/sysconfig.py
|
||||
+++ Python-3.11.9/Lib/sysconfig.py
|
||||
@@ -103,6 +103,11 @@ if os.name == 'nt':
|
||||
else:
|
||||
_INSTALL_SCHEMES['venv'] = _INSTALL_SCHEMES['posix_venv']
|
||||
|
||||
+# For a brief period of time in the Fedora 36 life cycle,
|
||||
+# this installation scheme existed and was documented in the release notes.
|
||||
+# For backwards compatibility, we keep it here (at least on 3.10 and 3.11).
|
||||
+_INSTALL_SCHEMES['rpm_prefix'] = _INSTALL_SCHEMES['posix_prefix']
|
||||
+
|
||||
|
||||
# NOTE: site.py has copy of this function.
|
||||
# Sync it when modify this function.
|
||||
@@ -162,6 +167,19 @@ if _HAS_USER_BASE:
|
||||
},
|
||||
}
|
||||
|
||||
+# This is used by distutils.command.install in the stdlib
|
||||
+# as well as pypa/distutils (e.g. bundled in setuptools).
|
||||
+# The self.prefix value is set to sys.prefix + /local/
|
||||
+# if neither RPM build nor virtual environment is
|
||||
+# detected to make distutils install packages
|
||||
+# into the separate location.
|
||||
+# https://fedoraproject.org/wiki/Changes/Making_sudo_pip_safe
|
||||
+if (not (hasattr(sys, 'real_prefix') or
|
||||
+ sys.prefix != sys.base_prefix) and
|
||||
+ 'RPM_BUILD_ROOT' not in os.environ):
|
||||
+ _prefix_addition = '/local'
|
||||
+
|
||||
+
|
||||
_SCHEME_KEYS = ('stdlib', 'platstdlib', 'purelib', 'platlib', 'include',
|
||||
'scripts', 'data')
|
||||
|
||||
@@ -258,11 +276,40 @@ def _extend_dict(target_dict, other_dict
|
||||
target_dict[key] = value
|
||||
|
||||
|
||||
+_CONFIG_VARS_LOCAL = None
|
||||
+
|
||||
+
|
||||
+def _config_vars_local():
|
||||
+ # This function returns the config vars with prefixes amended to /usr/local
|
||||
+ # https://fedoraproject.org/wiki/Changes/Making_sudo_pip_safe
|
||||
+ global _CONFIG_VARS_LOCAL
|
||||
+ if _CONFIG_VARS_LOCAL is None:
|
||||
+ _CONFIG_VARS_LOCAL = dict(get_config_vars())
|
||||
+ _CONFIG_VARS_LOCAL['base'] = '/usr/local'
|
||||
+ _CONFIG_VARS_LOCAL['platbase'] = '/usr/local'
|
||||
+ return _CONFIG_VARS_LOCAL
|
||||
+
|
||||
+
|
||||
def _expand_vars(scheme, vars):
|
||||
res = {}
|
||||
if vars is None:
|
||||
vars = {}
|
||||
- _extend_dict(vars, get_config_vars())
|
||||
+
|
||||
+ # when we are not in a virtual environment or an RPM build
|
||||
+ # we change '/usr' to '/usr/local'
|
||||
+ # to avoid surprises, we explicitly check for the /usr/ prefix
|
||||
+ # Python virtual environments have different prefixes
|
||||
+ # we only do this for posix_prefix, not to mangle the venv scheme
|
||||
+ # posix_prefix is used by sudo pip install
|
||||
+ # we only change the defaults here, so explicit --prefix will take precedence
|
||||
+ # https://fedoraproject.org/wiki/Changes/Making_sudo_pip_safe
|
||||
+ if (scheme == 'posix_prefix' and
|
||||
+ _PREFIX == '/usr' and
|
||||
+ 'RPM_BUILD_ROOT' not in os.environ):
|
||||
+ _extend_dict(vars, _config_vars_local())
|
||||
+ else:
|
||||
+ _extend_dict(vars, get_config_vars())
|
||||
+
|
||||
if os.name == 'nt':
|
||||
# On Windows we want to substitute 'lib' for schemes rather
|
||||
# than the native value (without modifying vars, in case it
|
||||
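The observable effect of the prefix override is easiest to see through :func:`sysconfig.get_path`: on a distribution-patched interpreter installed under ``/usr``, outside a virtual environment and outside an RPM build, the default ``posix_prefix`` paths move to ``/usr/local`` (a hypothetical session; the exact paths depend on the platform and are not part of the patch)::

    import os
    import sys
    import sysconfig

    outside_rpm_and_venv = (
        sys.prefix == '/usr'
        and sys.prefix == sys.base_prefix
        and 'RPM_BUILD_ROOT' not in os.environ
    )
    if outside_rpm_and_venv:
        print(sysconfig.get_path('scripts', 'posix_prefix'))
        # e.g. /usr/local/bin
        print(sysconfig.get_path('purelib', 'posix_prefix'))
        # e.g. /usr/local/lib/python3.11/site-packages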
Index: Python-3.11.9/Lib/test/test_sysconfig.py
|
||||
===================================================================
|
||||
--- Python-3.11.9.orig/Lib/test/test_sysconfig.py
|
||||
+++ Python-3.11.9/Lib/test/test_sysconfig.py
|
||||
@@ -111,8 +111,19 @@ class TestSysConfig(unittest.TestCase):
|
||||
for scheme in _INSTALL_SCHEMES:
|
||||
for name in _INSTALL_SCHEMES[scheme]:
|
||||
expected = _INSTALL_SCHEMES[scheme][name].format(**config_vars)
|
||||
+ tested = get_path(name, scheme)
|
||||
+ # https://fedoraproject.org/wiki/Changes/Making_sudo_pip_safe
|
||||
+ if tested.startswith('/usr/local'):
|
||||
+ # /usr/local should only be used in posix_prefix
|
||||
+ self.assertEqual(scheme, 'posix_prefix')
|
||||
+ # Fedora CI runs tests for venv and virtualenv that check for other prefixes
|
||||
+ self.assertEqual(sys.prefix, '/usr')
|
||||
+ # When building the RPM of Python, %check runs this with RPM_BUILD_ROOT set
|
||||
+ # Fedora CI runs this with RPM_BUILD_ROOT unset
|
||||
+ self.assertNotIn('RPM_BUILD_ROOT', os.environ)
|
||||
+ tested = tested.replace('/usr/local', '/usr')
|
||||
self.assertEqual(
|
||||
- os.path.normpath(get_path(name, scheme)),
|
||||
+ os.path.normpath(tested),
|
||||
os.path.normpath(expected),
|
||||
)
|
||||
|
||||
@@ -345,7 +356,7 @@ class TestSysConfig(unittest.TestCase):
|
||||
self.assertTrue(os.path.isfile(config_h), config_h)
|
||||
|
||||
def test_get_scheme_names(self):
|
||||
- wanted = ['nt', 'posix_home', 'posix_prefix', 'posix_venv', 'nt_venv', 'venv']
|
||||
+ wanted = ['nt', 'posix_home', 'posix_prefix', 'posix_venv', 'nt_venv', 'venv', 'rpm_prefix']
|
||||
if HAS_USER_BASE:
|
||||
wanted.extend(['nt_user', 'osx_framework_user', 'posix_user'])
|
||||
self.assertEqual(get_scheme_names(), tuple(sorted(wanted)))
|
||||
@@ -357,6 +368,8 @@ class TestSysConfig(unittest.TestCase):
|
||||
cmd = "-c", "import sysconfig; print(sysconfig.get_platform())"
|
||||
self.assertEqual(py.call_real(*cmd), py.call_link(*cmd))
|
||||
|
||||
+ @unittest.skipIf('RPM_BUILD_ROOT' not in os.environ,
|
||||
+ "Test doesn't expect Fedora's paths")
|
||||
def test_user_similar(self):
|
||||
# Issue #8759: make sure the posix scheme for the users
|
||||
# is similar to the global posix_prefix one
|
||||
|
BIN Python-3.11.11.tar.xz (Stored with Git LFS) Normal file: Binary file not shown.
1 Python-3.11.11.tar.xz.sigstore Normal file
@ -0,0 +1 @@
|
||||
{"mediaType": "application/vnd.dev.sigstore.bundle.v0.3+json", "verificationMaterial": {"certificate": {"rawBytes": "MIICzDCCAlOgAwIBAgIUWcRolJPsPmtJKA6VkjHSj7PtjY8wCgYIKoZIzj0EAwMwNzEVMBMGA1UEChMMc2lnc3RvcmUuZGV2MR4wHAYDVQQDExVzaWdzdG9yZS1pbnRlcm1lZGlhdGUwHhcNMjQxMjAzMTgxOTA1WhcNMjQxMjAzMTgyOTA1WjAAMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEN+fBJCRZIAZiWPBdjyQVD+x5vgmjuuVct1HkPHIBMuEe7wI4mBG2BhJ3fHkpr97efIH6ELMmPV99edyAIZFXR6OCAXIwggFuMA4GA1UdDwEB/wQEAwIHgDATBgNVHSUEDDAKBggrBgEFBQcDAzAdBgNVHQ4EFgQUkutZdHfCCSvGI87mjnBRwZB8ihYwHwYDVR0jBBgwFoAU39Ppz1YkEZb5qNjpKFWixi4YZD8wIgYDVR0RAQH/BBgwFoEUcGFibG9nc2FsQHB5dGhvbi5vcmcwKQYKKwYBBAGDvzABAQQbaHR0cHM6Ly9hY2NvdW50cy5nb29nbGUuY29tMCsGCisGAQQBg78wAQgEHQwbaHR0cHM6Ly9hY2NvdW50cy5nb29nbGUuY29tMIGKBgorBgEEAdZ5AgQCBHwEegB4AHYA3T0wasbHETJjGR4cmWc3AqJKXrjePK3/h4pygC8p7o4AAAGTjb9QlwAABAMARzBFAiEA4ddSINhYM+p0+DGzRqnA8rVtJF9YgI+9znXiq9fqQNkCIEErcSnQmN8jjErhwWWtcTM5GgH4ka/uk5kdHTycwxj3MAoGCCqGSM49BAMDA2cAMGQCMFmkCEH2pCBpFeFiUi2uA4opcJP6vh/zqb+D0tbxqd+jwbBkuDxDqA9/Ao3UWop+twIwO9o71KAlYWPSPYMeZERM4R8zWlp9mVJPiK3tgOJJi40MNmxwtfsQeQtncqiQLBAH"}, "tlogEntries": [{"logIndex": "153122039", "logId": {"keyId": "wNI9atQGlz+VWfO6LRygH4QUfY/8W4RFwiT5i5WRgB0="}, "kindVersion": {"kind": "hashedrekord", "version": "0.0.1"}, "integratedTime": "1733249946", "inclusionPromise": {"signedEntryTimestamp": "MEUCIBL9zpVJtljIuZtAe8uptLfDmakmbAjy5ELp2q8WJTQ7AiEAv6lIpyJZycHwTS+JHYJFzMVv0SmA8yQ0eMneBivMhPY="}, "inclusionProof": {"logIndex": "31217777", "rootHash": "BMKHBPePzSbNqf2NyF/Ejuyy3troRGpNS41Dqe43nZ0=", "treeSize": "31217778", "hashes": ["lrr8dxmtgD09fnZTo1tMTY00HNKc2ZIpbZa1djDeTes=", "yFxGSg1RDbtZ/eNftnMdBJGNEZmmLyx2ZRDFtAIMHAk=", "GeqsQGnvgc+gcuaIC+vQ5b0RdTyBxBnYTpbeW2AeD+Q=", "dMTPeN/a9xCQQP+Hz7sddW0pPj8n54sfkhcf3XhjrMM=", "XjayhjKU3shP7q7lhmhKDv3Vpi4gJgAPCu0KlEzc9Qo=", "go1dmexQYS5etu69upRRX7IFvuA0rIcT9aYjMstmPIU=", "AYwr74Bm2w383UnS7DdbZUUAhusq28JoxKpWrQ7OvGQ=", "u+yWmGIR6sAH32wiSy22mz1Yf+jfPdBTjFbyRISuTZw=", "3eFC7Gp4fWecybDOAw9uUTrM1xB7YRYRAGsfYkiQbV8=", "1uKk2qjOliHMiTk906jrchP8mXWsRG8apaU1sa0lfh0=", "oOecFfN3YqDOkbijS/ej1WF5Da/Gt/AZNhbwE9uoOE8=", "4lUF0YOu9XkIDXKXA0wMSzd6VeDY3TZAgmoOeWmS2+Y=", "gf+9m552B3PnkWnO0o4KdVvjcT3WVHLrCbf1DoVYKFw="], "checkpoint": {"envelope": "rekor.sigstore.dev - 1193050959916656506\n31217778\nBMKHBPePzSbNqf2NyF/Ejuyy3troRGpNS41Dqe43nZ0=\n\n\u2014 rekor.sigstore.dev wNI9ajBEAiA7ed0HqugBwVpmxDAR1VN35J91/+DeRdj09y5lFY+bRwIgYe07JnZlJvp3MfAMXX3i4XBsZoDRZoXtwfBaRj/8x8s=\n"}}, "canonicalizedBody": 
"eyJhcGlWZXJzaW9uIjoiMC4wLjEiLCJraW5kIjoiaGFzaGVkcmVrb3JkIiwic3BlYyI6eyJkYXRhIjp7Imhhc2giOnsiYWxnb3JpdGhtIjoic2hhMjU2IiwidmFsdWUiOiIyYTk5MjBjN2EwY2QyMzZkZTMzNjQ0ZWQ5ODBhMTNjYmJjMjEwNThiZmRjNTI4ZmViYjYwODE1NzVlZDczYmUzIn19LCJzaWduYXR1cmUiOnsiY29udGVudCI6Ik1FVUNJRU5KSGllaWs5WkVibW83a0p4ZUhWY2FvVDVYOUxyWG1zRTVxc1I5R1JpSEFpRUFtcHZyV21vUHF5YzRpQ09VYXVmY3dKTllMK1lPTWU0b0NOaWRLVGduT1FBPSIsInB1YmxpY0tleSI6eyJjb250ZW50IjoiTFMwdExTMUNSVWRKVGlCRFJWSlVTVVpKUTBGVVJTMHRMUzB0Q2sxSlNVTjZSRU5EUVd4UFowRjNTVUpCWjBsVlYyTlNiMnhLVUhOUWJYUktTMEUyVm10cVNGTnFOMUIwYWxrNGQwTm5XVWxMYjFwSmVtb3dSVUYzVFhjS1RucEZWazFDVFVkQk1WVkZRMmhOVFdNeWJHNWpNMUoyWTIxVmRWcEhWakpOVWpSM1NFRlpSRlpSVVVSRmVGWjZZVmRrZW1SSE9YbGFVekZ3WW01U2JBcGpiVEZzV2tkc2FHUkhWWGRJYUdOT1RXcFJlRTFxUVhwTlZHZDRUMVJCTVZkb1kwNU5hbEY0VFdwQmVrMVVaM2xQVkVFeFYycEJRVTFHYTNkRmQxbElDa3R2V2tsNmFqQkRRVkZaU1V0dldrbDZhakJFUVZGalJGRm5RVVZPSzJaQ1NrTlNXa2xCV21sWFVFSmthbmxSVmtRcmVEVjJaMjFxZFhWV1kzUXhTR3NLVUVoSlFrMTFSV1UzZDBrMGJVSkhNa0pvU2pObVNHdHdjamszWldaSlNEWkZURTF0VUZZNU9XVmtlVUZKV2taWVVqWlBRMEZZU1hkblowWjFUVUUwUndwQk1WVmtSSGRGUWk5M1VVVkJkMGxJWjBSQlZFSm5UbFpJVTFWRlJFUkJTMEpuWjNKQ1owVkdRbEZqUkVGNlFXUkNaMDVXU0ZFMFJVWm5VVlZyZFhSYUNtUklaa05EVTNaSFNUZzNiV3B1UWxKM1drSTRhV2haZDBoM1dVUldVakJxUWtKbmQwWnZRVlV6T1ZCd2VqRlphMFZhWWpWeFRtcHdTMFpYYVhocE5Ga0tXa1E0ZDBsbldVUldVakJTUVZGSUwwSkNaM2RHYjBWVlkwZEdhV0pIT1c1ak1rWnpVVWhDTldSSGFIWmlhVFYyWTIxamQwdFJXVXRMZDFsQ1FrRkhSQXAyZWtGQ1FWRlJZbUZJVWpCalNFMDJUSGs1YUZreVRuWmtWelV3WTNrMWJtSXlPVzVpUjFWMVdUSTVkRTFEYzBkRGFYTkhRVkZSUW1jM09IZEJVV2RGQ2toUmQySmhTRkl3WTBoTk5reDVPV2haTWs1MlpGYzFNR041Tlc1aU1qbHVZa2RWZFZreU9YUk5TVWRMUW1kdmNrSm5SVVZCWkZvMVFXZFJRMEpJZDBVS1pXZENORUZJV1VFelZEQjNZWE5pU0VWVVNtcEhValJqYlZkak0wRnhTa3RZY21wbFVFc3pMMmcwY0hsblF6aHdOMjgwUVVGQlIxUnFZamxSYkhkQlFRcENRVTFCVW5wQ1JrRnBSVUUwWkdSVFNVNW9XVTByY0RBclJFZDZVbkZ1UVRoeVZuUktSamxaWjBrck9YcHVXR2x4T1daeFVVNXJRMGxGUlhKalUyNVJDbTFPT0dwcVJYSm9kMWRYZEdOVVRUVkhaMGcwYTJFdmRXczFhMlJJVkhsamQzaHFNMDFCYjBkRFEzRkhVMDAwT1VKQlRVUkJNbU5CVFVkUlEwMUdiV3NLUTBWSU1uQkRRbkJHWlVacFZXa3lkVUUwYjNCalNsQTJkbWd2ZW5GaUswUXdkR0o0Y1dRcmFuZGlRbXQxUkhoRWNVRTVMMEZ2TTFWWGIzQXJkSGRKZHdwUE9XODNNVXRCYkZsWFVGTlFXVTFsV2tWU1RUUlNPSHBYYkhBNWJWWktVR2xMTTNSblQwcEthVFF3VFU1dGVIZDBabk5SWlZGMGJtTnhhVkZNUWtGSUNpMHRMUzB0UlU1RUlFTkZVbFJKUmtsRFFWUkZMUzB0TFMwSyJ9fX19"}]}, "messageSignature": {"messageDigest": {"algorithm": "SHA2_256", "digest": "Kpkgx6DNI23jNkTtmAoTy7whBYv9xSj+u2CBV17XO+M="}, "signature": "MEUCIENJHieik9ZEbmo7kJxeHVcaoT5X9LrXmsE5qsR9GRiHAiEAmpvrWmoPqyc4iCOUaufcwJNYL+YOMe4oCNidKTgnOQA="}}
|
BIN Python-3.11.8.tar.xz (Stored with Git LFS): Binary file not shown.
@ -1,16 +0,0 @@
|
||||
-----BEGIN PGP SIGNATURE-----
|
||||
|
||||
iQIzBAABCAAdFiEEz9yiRbEEPPKl+Xhl/+h0BBaL2EcFAmXCppEACgkQ/+h0BBaL
|
||||
2Edi6g//dRagLHlrmPyCrch7ZqAazLMXTHb3cerXg41QEqfwIl7osk1HnqObBgVN
|
||||
w8vgXy9ZlxWwv+cWvwrNLY1AWEfarhwRzWLkikHwycBIIgep1HmSvyU4wLKaN7mI
|
||||
c/LxGHfQZ6suu3gCVmRFBoB/ACpT0P5qvDpoUehrADE6wCqs0vbRiW/InLCTUpOy
|
||||
zZ+5ncK302JtafJkjIGf2VNB4yQATk/v7fO/z43sEQqhvzgtlWlXNmtCKshGBIt1
|
||||
mJpLEs8gCq97jObfbN7FkC3Ti/kEan7PbjDzsDKcBv/jJudvWywHtMzplgbjtOYG
|
||||
AgBM8bXbVC119BwmfBpvAxgsVKmmGi9d2McJUPOcIHKiHCb17fU0srRbSV47rE9N
|
||||
PWEHgQC2ICbdT9N1oimOEp16eYt5omFWfDy5C91oqUnBFtz8wqiNmyeQimegMgBe
|
||||
cDpOY73C2H7Vi6rX9EbyrG+LOkfJ6Vt5rTCa+zbAPy2ihz/ajA7UNH72t1uuzFQZ
|
||||
pPdUBNhtGxr5EB3zAqBxDuoh9DMOmDZACbT+npHR3Y7KaXTHYIe7Ot8CCrLpH+Ra
|
||||
8Yt6/CCD7KnsCWz6pfyH+ulIL4vw+dPnC809+neiXhiUuM5qiIr9K7HidzXi0Lwj
|
||||
sb8MVErS8dURFZP48e1dfbyJqsAvAosiGmjDDqbrlAC5attKjg8=
|
||||
=VFx6
|
||||
-----END PGP SIGNATURE-----
|
25 add-loongarch64-support.patch Normal file
@ -0,0 +1,25 @@
Description: Add platform triplets for LoongArch.

--- a/configure.ac
+++ b/configure.ac
@@ -976,6 +976,20 @@ cat > conftest.c <<EOF
hppa-linux-gnu
# elif defined(__ia64__)
ia64-linux-gnu
+# elif defined(__loongarch__)
+# if defined(__loongarch_lp64)
+# if defined(__loongarch_soft_float)
+ loongarch64-linux-gnusf
+# elif defined(__loongarch_single_float)
+ loongarch64-linux-gnuf32
+# elif defined(__loongarch_double_float)
+ loongarch64-linux-gnu
+# else
+# error unknown platform triplet
+# endif
+# else
+# error unknown platform triplet
+# endif
# elif defined(__m68k__) && !defined(__mcoldfire__)
m68k-linux-gnu
# elif defined(__mips_hard_float) && defined(__mips_isa_rev) && (__mips_isa_rev >=6) && defined(_MIPSEL)
37  bso1227999-reproducible-builds.patch  Normal file
@ -0,0 +1,37 @@
From ac2b8869724d7a57d9b5efbdce2f20423214e8bb Mon Sep 17 00:00:00 2001
From: "Bernhard M. Wiedemann" <bwiedemann@suse.de>
Date: Tue, 16 Jul 2024 21:39:33 +0200
Subject: [PATCH] Allow to override build date with SOURCE_DATE_EPOCH

to make builds reproducible.
See https://reproducible-builds.org/ for why this is good
and https://reproducible-builds.org/specs/source-date-epoch/
for the definition of this variable.
---
Doc/conf.py | 3 ++-
Doc/library/functions.rst | 2 +-
2 files changed, 3 insertions(+), 2 deletions(-)

--- a/Doc/conf.py
+++ b/Doc/conf.py
@@ -316,7 +316,8 @@ html_context = {
}

# This 'Last updated on:' timestamp is inserted at the bottom of every page.
-html_last_updated_fmt = time.strftime('%b %d, %Y (%H:%M UTC)', time.gmtime())
+html_time = int(os.environ.get('SOURCE_DATE_EPOCH', time.time()))
+html_last_updated_fmt = time.strftime('%b %d, %Y (%H:%M UTC)', time.gmtime(html_time))

# Path to find HTML templates.
templates_path = ['tools/templates']
--- a/Doc/library/functions.rst
+++ b/Doc/library/functions.rst
@@ -1356,7 +1356,7 @@ are always available. They are listed h
(where :func:`open` is declared), :mod:`os`, :mod:`os.path`, :mod:`tempfile`,
and :mod:`shutil`.

- .. audit-event:: open file,mode,flags open
+ .. audit-event:: open path,mode,flags open

The ``mode`` and ``flags`` arguments may have been modified or inferred from
the original call.
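For readers unfamiliar with SOURCE_DATE_EPOCH, here is a minimal standalone sketch of the fallback pattern this patch applies to Doc/conf.py; the variable names below are illustrative only and are not the packaged code:

    import os
    import time

    # Honor SOURCE_DATE_EPOCH (seconds since the Unix epoch) when it is set,
    # so repeated builds of identical sources render the same date string;
    # otherwise fall back to the current time, as before the patch.
    build_time = int(os.environ.get('SOURCE_DATE_EPOCH', time.time()))
    last_updated = time.strftime('%b %d, %Y (%H:%M UTC)', time.gmtime(build_time))
    print(last_updated)

Exporting SOURCE_DATE_EPOCH before the documentation build therefore stamps every rebuild of the same sources with the same "Last updated on:" timestamp.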
@ -3,11 +3,9 @@
Misc/NEWS | 2 +-
2 files changed, 1 insertion(+), 4 deletions(-)

Index: Python-3.11.8/Doc/using/configure.rst
===================================================================
--- Python-3.11.8.orig/Doc/using/configure.rst
+++ Python-3.11.8/Doc/using/configure.rst
@@ -41,7 +41,6 @@ General Options
--- a/Doc/using/configure.rst
+++ b/Doc/using/configure.rst
@@ -43,7 +43,6 @@ General Options

See :data:`sys.int_info.bits_per_digit <sys.int_info>`.

@ -15,7 +13,7 @@ Index: Python-3.11.8/Doc/using/configure.rst
.. option:: --with-cxx-main=COMPILER

Compile the Python ``main()`` function and link Python executable with C++
@@ -527,13 +526,11 @@ macOS Options
@@ -529,13 +528,11 @@ macOS Options

See ``Mac/README.rst``.

@ -29,11 +27,9 @@ Index: Python-3.11.8/Doc/using/configure.rst
.. option:: --enable-framework=INSTALLDIR

Create a Python.framework rather than a traditional Unix install. Optional
Index: Python-3.11.8/Misc/NEWS
===================================================================
--- Python-3.11.8.orig/Misc/NEWS
+++ Python-3.11.8/Misc/NEWS
@@ -9411,7 +9411,7 @@ C API
--- a/Misc/NEWS
+++ b/Misc/NEWS
@@ -9774,7 +9774,7 @@ C API

- bpo-40939: Removed documentation for the removed ``PyParser_*`` C API.

- bpo-43795: The list in :ref:`limited-api-list` now shows the public name
35  gh120226-fix-sendfile-test-kernel-610.patch  Normal file
@ -0,0 +1,35 @@
From 1b3f6523a5c83323cdc44031b33a1c062e5dc698 Mon Sep 17 00:00:00 2001
From: Xi Ruoyao <xry111@xry111.site>
Date: Fri, 7 Jun 2024 23:51:32 +0800
Subject: [PATCH] gh-120226: Fix
test_sendfile_close_peer_in_the_middle_of_receiving on Linux >= 6.10
(GH-120227)

The worst case is that the kernel buffers 17 pages with a page size of 64k.
(cherry picked from commit a7584245661102a5768c643fbd7db8395fd3c90e)

Co-authored-by: Xi Ruoyao <xry111@xry111.site>
---
Lib/test/test_asyncio/test_sendfile.py | 11 ++++-------
1 file changed, 4 insertions(+), 7 deletions(-)

--- a/Lib/test/test_asyncio/test_sendfile.py
+++ b/Lib/test/test_asyncio/test_sendfile.py
@@ -93,13 +93,10 @@ class MyProto(asyncio.Protocol):

class SendfileBase:

- # 256 KiB plus small unaligned to buffer chunk
- # Newer versions of Windows seems to have increased its internal
- # buffer and tries to send as much of the data as it can as it
- # has some form of buffering for this which is less than 256KiB
- # on newer server versions and Windows 11.
- # So DATA should be larger than 256 KiB to make this test reliable.
- DATA = b"x" * (1024 * 256 + 1)
+ # Linux >= 6.10 seems buffering up to 17 pages of data.
+ # So DATA should be large enough to make this test reliable even with a
+ # 64 KiB page configuration.
+ DATA = b"x" * (1024 * 17 * 64 + 1)
# Reduce socket buffer size to test on relative small data sets.
BUF_SIZE = 4 * 1024 # 4 KiB
@ -1,40 +1,485 @@
|
||||
-------------------------------------------------------------------
|
||||
Thu Sep 19 13:14:43 UTC 2024 - Matej Cepl <mcepl@suse.com>
|
||||
Wed Dec 4 21:40:41 UTC 2024 - Matej Cepl <mcepl@cepl.eu>
|
||||
|
||||
- Add CVE-2024-8088-zipfile-Path-sanitization.patch sanitizing
|
||||
names in zipfile.Path (bsc#1229704, CVE-2024-8088).
|
||||
- Add CVE-2024-6232-ReDOS-backtrack-tarfile.patch removing
|
||||
backtracking when parsing tarfile headers (bsc#1230227,
|
||||
CVE-2024-6232).
|
||||
- Add CVE-2024-7592-quad-complex-cookies.patch fixing quadratic
|
||||
complexity in parsing "-quoted cookie values with backslashes
|
||||
(bsc#1229596, CVE-2024-7592).
|
||||
- Update to 3.11.11:
|
||||
- Tools/Demos
|
||||
- gh-123418: Update GitHub CI workflows to use OpenSSL 3.0.15
|
||||
and multissltests to use 3.0.15, 3.1.7, and 3.2.3.
|
||||
- Tests
|
||||
- gh-125041: Re-enable skipped tests for zlib on the
|
||||
s390x architecture: only skip checks of the compressed
|
||||
bytes, which can be different between zlib’s software
|
||||
implementation and the hardware-accelerated implementation.
|
||||
- Security
|
||||
- gh-126623: Upgrade libexpat to 2.6.4
|
||||
- gh-122792: Changed IPv4-mapped ipaddress.IPv6Address to
|
||||
consistently use the mapped IPv4 address value for deciding
|
||||
properties. Properties which have their behavior fixed are
|
||||
is_multicast, is_reserved, is_link_local, is_global, and
|
||||
is_unspecified.
|
||||
- Library
|
||||
- gh-124651: Properly quote template strings in venv
|
||||
activation scripts (bsc#1232241, CVE-2024-9287).
|
||||
- Removed upstreamed patches:
|
||||
- CVE-2024-9287-venv_path_unquoted.patch
|
||||
|
||||
-------------------------------------------------------------------
|
||||
Sat Aug 3 17:28:26 UTC 2024 - Matej Cepl <mcepl@suse.com>
|
||||
Tue Dec 3 08:21:35 UTC 2024 - John Paul Adrian Glaubitz <adrian.glaubitz@suse.com>
|
||||
|
||||
- Add add-loongarch64-support.patch to support loongarch64
|
||||
|
||||
-------------------------------------------------------------------
|
||||
Mon Dec 2 22:50:07 UTC 2024 - Matej Cepl <mcepl@suse.com>
|
||||
|
||||
- Fix changelog
|
||||
|
||||
-------------------------------------------------------------------
|
||||
Mon Nov 11 12:43:40 UTC 2024 - Daniel Garcia <daniel.garcia@suse.com>
|
||||
|
||||
- Remove -IVendor/ from python-config boo#1231795
|
||||
|
||||
-------------------------------------------------------------------
|
||||
Fri Nov 1 16:32:10 UTC 2024 - Matej Cepl <mcepl@cepl.eu>
|
||||
|
||||
- Add CVE-2024-9287-venv_path_unquoted.patch to properly quote
|
||||
path names provided when creating a virtual environment
|
||||
(bsc#1232241, CVE-2024-9287)
|
||||
|
||||
-------------------------------------------------------------------
|
||||
Wed Oct 2 16:18:29 UTC 2024 - Matej Cepl <mcepl@cepl.eu>
|
||||
|
||||
- Drop .pyc files from docdir for reproducible builds
|
||||
(bsc#1230906).
|
||||
|
||||
-------------------------------------------------------------------
|
||||
Mon Sep 9 16:53:07 UTC 2024 - Matej Cepl <mcepl@cepl.eu>
|
||||
|
||||
- Update to 3.11.10:
|
||||
- Security
|
||||
- gh-123678: Upgrade libexpat to 2.6.3
|
||||
- gh-121957: Fixed missing audit events around interactive
|
||||
use of Python, now also properly firing for ``python -i``,
|
||||
as well as for ``python -m asyncio``. The event in question
|
||||
is ``cpython.run_stdin``.
|
||||
- gh-122133: Authenticate the socket connection for the
|
||||
``socket.socketpair()`` fallback on platforms where
|
||||
``AF_UNIX`` is not available like Windows. Patch by
|
||||
Gregory P. Smith <greg@krypto.org> and Seth Larson
|
||||
<seth@python.org>. Reported by Ellie <el@horse64.org>
|
||||
- gh-121285: Remove backtracking from tarfile header parsing
|
||||
for ``hdrcharset``, PAX, and GNU sparse headers
|
||||
(bsc#1230227, CVE-2024-6232).
|
||||
- gh-118486: :func:`os.mkdir` on Windows now accepts
|
||||
*mode* of ``0o700`` to restrict the new directory to
|
||||
the current user. This fixes CVE-2024-4030 affecting
|
||||
:func:`tempfile.mkdtemp` in scenarios where the base
|
||||
temporary directory is more permissive than the default.
|
||||
- gh-116741: Update bundled libexpat to 2.6.2
|
||||
- Library
|
||||
- gh-123270: Applied a more surgical fix for malformed
|
||||
payloads in :class:`zipfile.Path` causing infinite loops
|
||||
(gh-122905) without breaking contents using legitimate
|
||||
characters (bsc#1229704, CVE-2024-8088).
|
||||
- gh-123067: Fix quadratic complexity in parsing ``"``-quoted
|
||||
cookie values with backslashes by :mod:`http.cookies`
|
||||
(bsc#1229596, CVE-2024-7592).
|
||||
- gh-122905: :class:`zipfile.Path` objects now sanitize names
|
||||
from the zipfile.
|
||||
- gh-121650: :mod:`email` headers with embedded newlines are
|
||||
now quoted on output. The :mod:`~email.generator` will now
|
||||
refuse to serialize (write) headers that are unsafely folded
|
||||
or delimited; see :attr:`~email.policy.Policy.verify_generated_headers`.
|
||||
(Contributed by Bas Bloemsaat and Petr Viktorin in
|
||||
:gh:`121650`; CVE-2024-6923, bsc#1228780).
|
||||
- gh-119506: Fix :meth:`!io.TextIOWrapper.write` method
|
||||
breaks internal buffer when the method is called again
|
||||
during flushing internal buffer.
|
||||
- gh-118643: Fix an AttributeError in the :mod:`email` module
|
||||
when re-folding a long address list. Also fix more cases of
|
||||
incorrect encoding of the address separator in the address
|
||||
list.
|
||||
- gh-113171: Fixed various false positives and false
|
||||
negatives in * :attr:`ipaddress.IPv4Address.is_private`
|
||||
(see these docs for details) *
|
||||
:attr:`ipaddress.IPv4Address.is_global` *
|
||||
:attr:`ipaddress.IPv6Address.is_private` *
|
||||
:attr:`ipaddress.IPv6Address.is_global` Also in the
|
||||
corresponding :class:`ipaddress.IPv4Network` and
|
||||
:class:`ipaddress.IPv6Network` attributes.
|
||||
Fixes bsc#1226448 (CVE-2024-4032).
|
||||
- gh-102988: :func:`email.utils.getaddresses` and
|
||||
:func:`email.utils.parseaddr` now return ``('', '')``
|
||||
2-tuples in more situations where invalid email addresses
|
||||
are encountered instead of potentially inaccurate
|
||||
values. Add optional *strict* parameter to these two
|
||||
functions: use ``strict=False`` to get the old behavior,
|
||||
accept malformed inputs. ``getattr(email.utils,
|
||||
'supports_strict_parsing', False)`` can be use to check if
|
||||
the *strict* paramater is available. Patch by Thomas Dwyer
|
||||
and Victor Stinner to improve the CVE-2023-27043 fix
|
||||
(bsc#1210638).
|
||||
- gh-67693: Fix :func:`urllib.parse.urlunparse` and
|
||||
:func:`urllib.parse.urlunsplit` for URIs with path starting
|
||||
with multiple slashes and no authority. Based on patch by
|
||||
Ashwin Ramaswami.
|
||||
- Core and Builtins
|
||||
- gh-112275: A deadlock involving ``pystate.c``'s
|
||||
``HEAD_LOCK`` in ``posixmodule.c`` at fork is now
|
||||
fixed. Patch by ChuBoning based on previous Python 3.12 fix
|
||||
by Victor Stinner.
|
||||
- gh-109120: Added handling of incorrect star expressions, e.g.
|
||||
``f(3, *)``. Patch by Grigoryev Semyon
|
||||
- Removed upstreamed patches:
|
||||
- CVE-2023-27043-email-parsing-errors.patch
|
||||
- CVE-2024-4032-private-IP-addrs.patch
|
||||
- CVE-2024-6923-email-hdr-inject.patch
|
||||
- CVE-2024-8088-inf-loop-zipfile_Path.patch
|
||||
(renamed from CVE-2024-8088-zipfile-Path-sanitization.patch)
|
||||
- CVE-2024-6232-ReDOS-backtrack-tarfile.patch
|
||||
- CVE-2024-7592-quad-complex-cookies.patch
|
||||
|
||||
-------------------------------------------------------------------
|
||||
Mon Sep 2 09:44:26 UTC 2024 - Matej Cepl <mcepl@cepl.eu>
|
||||
|
||||
- Add gh120226-fix-sendfile-test-kernel-610.patch to avoid
|
||||
failing test_sendfile_close_peer_in_the_middle_of_receiving
|
||||
tests on Linux >= 6.10 (GH-120227).
|
||||
|
||||
-------------------------------------------------------------------
|
||||
Wed Aug 28 16:54:34 UTC 2024 - Matej Cepl <mcepl@cepl.eu>
|
||||
|
||||
- Add CVE-2024-8088-inf-loop-zipfile_Path.patch to prevent
|
||||
malformed payload to cause infinite loops in zipfile.Path
|
||||
(bsc#1229704, CVE-2024-8088).
|
||||
|
||||
-------------------------------------------------------------------
|
||||
Wed Aug 7 12:12:42 UTC 2024 - Matej Cepl <mcepl@suse.com>
|
||||
|
||||
- bsc#1221854 (CVE-2024-0450) Add
|
||||
CVE-2024-0450-zipfile-avoid-quoted-overlap-zipbomb.patch
|
||||
detecting the vulnerability of the "quoted-overlap" zipbomb
|
||||
(from gh#python/cpython!110016).
|
||||
- Add CVE-2023-52425-libexpat-2.6.0-backport.patch to fix tests with
|
||||
patched libexpat below 2.6.0 that doesn't update the version number,
|
||||
just in SLE.
|
||||
- Add CVE-2024-4032-private-IP-addrs.patch to fix bsc#1226448
|
||||
(CVE-2024-4032) rearranging definition of private v global IP
|
||||
addresses.
|
||||
- Add CVE-2024-0397-memrace_ssl.SSLContext_cert_store.patch
|
||||
fixing bsc#1226447 (CVE-2024-0397) by removing memory race
|
||||
condition in ssl.SSLContext certificate store methods.
|
||||
- Add CVE-2024-6923-email-hdr-inject.patch to prevent email
|
||||
header injection due to unquoted newlines (bsc#1228780,
|
||||
CVE-2024-6923).
|
||||
- %{profileopt} variable is set according to the variable
|
||||
%{do_profiling} (bsc#1227999)
|
||||
|
||||
-------------------------------------------------------------------
|
||||
Mon Jul 22 21:20:55 UTC 2024 - Matej Cepl <mcepl@cepl.eu>
|
||||
|
||||
- Remove %suse_update_desktop_file macro as it is not useful any
|
||||
more.
|
||||
|
||||
-------------------------------------------------------------------
|
||||
Thu Jul 18 22:37:07 UTC 2024 - Matej Cepl <mcepl@cepl.eu>
|
||||
|
||||
- Adding bso1227999-reproducible-builds.patch fixing bsc#1227999
|
||||
adding reproducibility patches from gh#python/cpython!121872
|
||||
and gh#python/cpython!121883.
|
||||
|
||||
-------------------------------------------------------------------
|
||||
Mon Jul 15 12:14:05 UTC 2024 - Matej Cepl <mcepl@cepl.eu>
|
||||
|
||||
- Stop using %%defattr, it seems to be breaking proper executable
|
||||
attributes on /usr/bin/ scripts (bsc#1227378).
|
||||
- Remove included patches:
|
||||
- libexpat260.patch
|
||||
attributes on /usr/bin/ scripts (bsc#1227378).
|
||||
|
||||
-------------------------------------------------------------------
|
||||
Tue Jul 2 10:32:58 UTC 2024 - Daniel Garcia <daniel.garcia@suse.com>
|
||||
|
||||
- Update F00251-change-user-install-location.patch to make pip and
|
||||
modern tools install directly in /usr/local when used by the user.
|
||||
bsc#1225660
|
||||
|
||||
-------------------------------------------------------------------
|
||||
Tue Jun 25 21:57:40 UTC 2024 - Matej Cepl <mcepl@cepl.eu>
|
||||
|
||||
- Add CVE-2024-4032-private-IP-addrs.patch to fix bsc#1226448
|
||||
(CVE-2024-4032) rearranging definition of private v global IP
|
||||
addresses.
|
||||
|
||||
-------------------------------------------------------------------
|
||||
Wed May 1 08:39:08 UTC 2024 - Matej Cepl <mcepl@suse.com>
|
||||
|
||||
- Update CVE-2023-52425-libexpat-2.6.0-backport.patch
|
||||
so that it uses features sniffing, not just
|
||||
comparing version number. Include also
|
||||
support-expat-CVE-2022-25236-patched.patch.
|
||||
- Add CVE-2023-52425-remove-reparse_deferral-tests.patch skipping
|
||||
failing tests.
|
||||
- Refresh patches:
|
||||
- CVE-2023-27043-email-parsing-errors.patch
|
||||
- fix_configure_rst.patch
|
||||
- skip_if_buildbot-extend.patch
|
||||
- Remove included patch:
|
||||
- support-expat-CVE-2022-25236-patched.patch
|
||||
- CVE-2023-52425-remove-reparse_deferral-tests.patch
|
||||
|
||||
-------------------------------------------------------------------
|
||||
Mon Apr 15 10:31:32 UTC 2024 - Daniel Garcia <daniel.garcia@suse.com>
|
||||
|
||||
- Add CVE-2023-52425-libexpat-2.6.0-backport.patch to fix tests with
|
||||
patched libexpat below 2.6.0 that doesn't update the version number,
|
||||
just in SLE.
|
||||
|
||||
-------------------------------------------------------------------
|
||||
Mon Apr 8 05:44:04 UTC 2024 - Daniel Garcia <daniel.garcia@suse.com>
|
||||
|
||||
- Remove not needed upstream patches:
|
||||
* libexpat260.patch
|
||||
* CVE-2023-6597-TempDir-cleaning-symlink.patch, bsc#1219666
|
||||
* CVE-2024-0397-memrace_ssl.SSLContext_cert_store.patch
|
||||
|
||||
- Update to 3.11.9:
|
||||
* Security
|
||||
- gh-115398: Allow controlling Expat >=2.6.0 reparse deferral
|
||||
(CVE-2023-52425, bsc#1219559) by adding five new methods:
|
||||
xml.etree.ElementTree.XMLParser.flush()
|
||||
xml.etree.ElementTree.XMLPullParser.flush()
|
||||
xml.parsers.expat.xmlparser.GetReparseDeferralEnabled()
|
||||
xml.parsers.expat.xmlparser.SetReparseDeferralEnabled()
|
||||
xml.sax.expatreader.ExpatParser.flush()
|
||||
- gh-115399: Update bundled libexpat to 2.6.0
|
||||
- gh-115243: Fix possible crashes in collections.deque.index()
|
||||
when the deque is concurrently modified.
|
||||
- gh-114572: ssl.SSLContext.cert_store_stats() and
|
||||
ssl.SSLContext.get_ca_certs() now correctly lock access to the
|
||||
certificate store, when the ssl.SSLContext is shared across
|
||||
multiple threads (bsc#1226447, CVE-2024-0397).
|
||||
* Core and Builtins
|
||||
- gh-116296: Fix possible refleak in object.__reduce__() internal
|
||||
error handling.
|
||||
- gh-116034: Fix location of the error on a failed assertion.
|
||||
- gh-115823: Properly calculate error ranges in the parser when
|
||||
raising SyntaxError exceptions caused by invalid byte sequences.
|
||||
Patch by Pablo Galindo
|
||||
- gh-112087: An empty reverse iterator for a list will now be
|
||||
reduced to reversed(). Patch by Donghee Na.
|
||||
- gh-115011: Setters for members with an unsigned integer type now
|
||||
support the same range of valid values for objects that have a
|
||||
__index__() method as for int.
|
||||
- gh-96497: Fix incorrect resolution of mangled class variables
|
||||
used in assignment expressions in comprehensions.
|
||||
* Library
|
||||
- gh-117310: Fixed an unlikely early & extra Py_DECREF triggered
|
||||
crash in ssl when creating a new _ssl._SSLContext if CPython was
|
||||
built implausibly such that the default cipher list is empty or
|
||||
the SSL library it was linked against reports a failure from its
|
||||
C SSL_CTX_set_cipher_list() API.
|
||||
- gh-117178: Fix regression in lazy loading of self-referential
|
||||
modules, introduced in gh-114781.
|
||||
- gh-117084: Fix zipfile extraction for directory entries with the
|
||||
name containing backslashes on Windows.
|
||||
- gh-117110: Fix a bug that prevents subclasses of typing.Any to
|
||||
be instantiated with arguments. Patch by Chris Fu.
|
||||
- gh-90872: On Windows, subprocess.Popen.wait() no longer calls
|
||||
WaitForSingleObject() with a negative timeout: pass 0 ms if the
|
||||
timeout is negative. Patch by Victor Stinner.
|
||||
- gh-116957: configparser: Don’t leave ConfigParser values in an
|
||||
invalid state (stored as a list instead of a str) after an
|
||||
earlier read raised DuplicateSectionError or
|
||||
DuplicateOptionError.
|
||||
- gh-90095: Ignore empty lines and comments in .pdbrc
|
||||
- gh-116764: Restore support of None and other false values in
|
||||
urllib.parse functions parse_qs() and parse_qsl(). Also, they
|
||||
now raise a TypeError for non-zero integers and non-empty
|
||||
sequences.
|
||||
- gh-116811: In PathFinder.invalidate_caches, delegate to
|
||||
MetadataPathFinder.invalidate_caches.
|
||||
- gh-116600: Fix repr() for global Flag members.
|
||||
- gh-116484: Change automatically generated tkinter.Checkbutton
|
||||
widget names to avoid collisions with automatically generated
|
||||
tkinter.ttk.Checkbutton widget names within the same parent
|
||||
widget.
|
||||
- gh-116401: Fix blocking os.fwalk() and shutil.rmtree() on
|
||||
opening named pipe.
|
||||
- gh-116143: Fix a race in pydoc _start_server, eliminating a
|
||||
window in which _start_server can return a thread that is
|
||||
“serving” but without a docserver set.
|
||||
- gh-116325: typing: raise SyntaxError instead of AttributeError
|
||||
on forward references as empty strings.
|
||||
- gh-90535: Fix support of interval values > 1 in
|
||||
logging.TimedRotatingFileHandler for when='MIDNIGHT' and
|
||||
when='Wx'.
|
||||
- gh-115978: Disable preadv(), readv(), pwritev(), and writev() on
|
||||
WASI.
|
||||
- Under wasmtime for WASI 0.2, these functions don’t pass
|
||||
test_posix
|
||||
(https://github.com/bytecodealliance/wasmtime/issues/7830).
|
||||
- gh-88352: Fix the computation of the next rollover time in the
|
||||
logging.TimedRotatingFileHandler handler. computeRollover() now
|
||||
always returns a timestamp larger than the specified time and
|
||||
works correctly during the DST change. doRollover() no longer
|
||||
overwrites the already rolled-over file, preventing data loss
|
||||
when run at midnight or during repeated time at the DST change.
|
||||
- gh-87115: Set __main__.__spec__ to None when running a script
|
||||
with pdb
|
||||
- gh-76511: Fix UnicodeEncodeError in email.Message.as_string()
|
||||
that results when a message that claims to be in the ascii
|
||||
character set actually has non-ascii characters. Non-ascii
|
||||
characters are now replaced with the U+FFFD replacement
|
||||
character, like in the replace error handler.
|
||||
- gh-75988: Fixed unittest.mock.create_autospec() to pass the call
|
||||
through to the wrapped object to return the real result.
|
||||
- gh-115881: Fix issue where ast.parse() would incorrectly flag
|
||||
conditional context managers (such as with (x() if y else z()):
|
||||
...) as invalid syntax if feature_version=(3, 8) was passed.
|
||||
This reverts changes to the grammar made as part of gh-94949.
|
||||
- gh-115886: Fix silent truncation of the name with an embedded
|
||||
null character in multiprocessing.shared_memory.SharedMemory.
|
||||
- gh-115809: Improve algorithm for computing which rolled-over log
|
||||
files to delete in logging.TimedRotatingFileHandler. It is now
|
||||
reliable for handlers without namer and with arbitrary
|
||||
deterministic namer that leaves the datetime part in the file
|
||||
name unmodified.
|
||||
- gh-74668: urllib.parse functions parse_qs() and parse_qsl() now
|
||||
support bytes arguments containing raw and percent-encoded
|
||||
non-ASCII data.
|
||||
- gh-67044: csv.writer() now always quotes or escapes '\r' and
|
||||
'\n', regardless of lineterminator value.
|
||||
- gh-115712: csv.writer() now quotes empty fields if delimiter is
|
||||
a space and skipinitialspace is true and raises exception if
|
||||
quoting is not possible.
|
||||
- gh-115618: Fix improper decreasing the reference count for None
|
||||
argument in property methods getter(), setter() and deleter().
|
||||
- gh-115570: A DeprecationWarning is no longer omitted on access
|
||||
to the __doc__ attributes of the deprecated typing.io and
|
||||
typing.re pseudo-modules.
|
||||
- gh-112006: Fix inspect.unwrap() for types with the __wrapper__
|
||||
data descriptor.
|
||||
- gh-101293: Support callables with the __call__() method and
|
||||
types with __new__() and __init__() methods set to class
|
||||
methods, static methods, bound methods, partial functions, and
|
||||
other types of methods and descriptors in
|
||||
inspect.Signature.from_callable().
|
||||
- gh-115392: Fix a bug in doctest where incorrect line numbers
|
||||
would be reported for decorated functions.
|
||||
- gh-114563: Fix several format() bugs when using the C
|
||||
implementation of Decimal: * memory leak in some rare cases when
|
||||
using the z format option (coerce negative 0) * incorrect output
|
||||
when applying the z format option to type F (fixed-point with
|
||||
capital NAN / INF) * incorrect output when applying the # format
|
||||
option (alternate form)
|
||||
- gh-115197: urllib.request no longer resolves the hostname before
|
||||
checking it against the system’s proxy bypass list on macOS and
|
||||
Windows.
|
||||
- gh-115198: Fix support of Docutils >= 0.19 in distutils.
|
||||
- gh-115165: Most exceptions are now ignored when attempting to
|
||||
set the __orig_class__ attribute on objects returned when
|
||||
calling typing generic aliases (including generic aliases
|
||||
created using typing.Annotated). Previously only AttributeError
|
||||
was ignored. Patch by Dave Shawley.
|
||||
- gh-115133: Fix tests for XMLPullParser with Expat 2.6.0.
|
||||
- gh-115059: io.BufferedRandom.read1() now flushes the underlying
|
||||
write buffer.
|
||||
- gh-79382: Trailing ** no longer matches files and
|
||||
non-existing paths in recursive glob().
|
||||
- gh-114763: Protect modules loaded with importlib.util.LazyLoader
|
||||
from race conditions when multiple threads try to access
|
||||
attributes before the loading is complete.
|
||||
- gh-97959: Fix rendering class methods, bound methods, method and
|
||||
function aliases in pydoc. Class methods no longer have “method
|
||||
of builtins.type instance” note. Corresponding notes are now
|
||||
added for class and unbound methods. Method and function aliases
|
||||
now have references to the module or the class where the origin
|
||||
was defined if it differs from the current. Bound methods are
|
||||
now listed in the static methods section. Methods of builtin
|
||||
classes are now supported as well as methods of Python classes.
|
||||
- gh-112281: Allow creating union of types for typing.Annotated
|
||||
with unhashable metadata.
|
||||
- gh-111775: Fix importlib.resources.simple.ResourceHandle.open()
|
||||
for text mode, added missed stream argument.
|
||||
- gh-90095: Make .pdbrc and -c work with any valid pdb commands.
|
||||
- gh-107155: Fix incorrect output of help(x) where x is a lambda
|
||||
function, which has an __annotations__ dictionary attribute with
|
||||
a "return" key.
|
||||
- gh-105866: Fixed a _get_slots bug which caused an error when defining
|
||||
dataclasses with slots and a weakref_slot.
|
||||
- gh-60346: Fix ArgumentParser inconsistent with parse_known_args.
|
||||
- gh-100985: Update HTTPSConnection to consistently wrap IPv6
|
||||
Addresses when using a proxy.
|
||||
- gh-100884: email: fix misfolding of comma in address-lists over
|
||||
multiple lines in combination with unicode encoding.
|
||||
- gh-95782: Fix io.BufferedReader.tell(),
|
||||
io.BufferedReader.seek(), _pyio.BufferedReader.tell(),
|
||||
io.BufferedRandom.tell(), io.BufferedRandom.seek() and
|
||||
_pyio.BufferedRandom.tell() being able to return negative
|
||||
offsets.
|
||||
- gh-96310: Fix a traceback in argparse when all options in a
|
||||
mutually exclusive group are suppressed.
|
||||
- gh-93205: Fixed a bug in
|
||||
logging.handlers.TimedRotatingFileHandler where multiple
|
||||
rotating handler instances pointing to files with the same name
|
||||
but different extensions would conflict and not delete the
|
||||
correct files.
|
||||
- bpo-44865: Add missing call to localization function in
|
||||
argparse.
|
||||
- bpo-43952: Fix multiprocessing.connection.Listener.accept() to
|
||||
accept empty bytes as authkey. Not accepting empty bytes as key
|
||||
causes it to hang indefinitely.
|
||||
- bpo-42125: linecache: get module name from __spec__ if
|
||||
available. This allows getting source code for the __main__
|
||||
module when a custom loader is used.
|
||||
- gh-66543: Make mimetypes.guess_type() properly parse URLs
|
||||
with only a host name, URLs containing fragment or query, and
|
||||
filenames with only a UNC sharepoint on Windows. Based on patch
|
||||
by Dong-hee Na.
|
||||
- bpo-33775: Add ‘default’ and ‘version’ help text for
|
||||
localization in argparse.
|
||||
* Documentation
|
||||
- gh-115399: Document CVE-2023-52425 of Expat <2.6.0 under “XML
|
||||
vulnerabilities”.
|
||||
- gh-115233: Fix an example for LoggerAdapter in the Logging
|
||||
Cookbook.
|
||||
* Tests
|
||||
- gh-83434: Disable JUnit XML output (--junit-xml=FILE command
|
||||
line option) in regrtest when hunting for reference leaks (-R
|
||||
option). Patch by Victor Stinner.
|
||||
- gh-117187: Fix XML tests for vanilla Expat <2.6.0.
|
||||
- gh-115979: Update test_importlib so that it passes under WASI
|
||||
SDK 21.
|
||||
- gh-116307: Added import helper isolated_modules as CleanImport
|
||||
does not remove modules imported during the context.
|
||||
- gh-115720: Leak tests (-R, --huntrleaks) now show a summary of
|
||||
the number of leaks found in each iteration.
|
||||
- gh-115122: Add --bisect option to regrtest test runner: run
|
||||
failed tests with test.bisect_cmd to identify failing tests.
|
||||
Patch by Victor Stinner.
|
||||
- gh-115596: Fix ProgramPriorityTests in test_os permanently
|
||||
changing the process priority.
|
||||
- gh-115198: Fix test_check_metadata_deprecate in distutils tests
|
||||
with a newer Docutils.
|
||||
* Build
|
||||
- gh-116313: Get WASI builds to work under wasmtime 18 w/ WASI
|
||||
0.2/preview2 primitives.
|
||||
- gh-115167: Avoid vendoring vcruntime140_threads.dll when
|
||||
building with Visual Studio 2022 version 17.8.
|
||||
* Windows
|
||||
- gh-116773: Fix instances of <_overlapped.Overlapped object at
|
||||
0xXXX> still has pending operation at deallocation, the process
|
||||
may crash.
|
||||
- gh-91227: Fix the asyncio ProactorEventLoop implementation so
|
||||
that sending a datagram to an address that is not listening does
|
||||
not prevent receiving any more datagrams.
|
||||
- gh-115554: The installer now has more strict rules about
|
||||
updating the Python Launcher for Windows. In general, most users
|
||||
only have a single launcher installed and will see no
|
||||
difference. When multiple launchers have been installed, the
|
||||
option to install the launcher is disabled until all but one
|
||||
have been removed. Downgrading the launcher (which was never
|
||||
allowed) is now more obviously blocked.
|
||||
- gh-115543: Python Launcher for Windows can now detect Python
|
||||
3.13 when installed from the Microsoft Store, and will install
|
||||
Python 3.12 by default when PYLAUNCHER_ALLOW_INSTALL is set.
|
||||
- gh-115009: Update Windows installer to use SQLite 3.45.1.
|
||||
* IDLE
|
||||
- gh-88516: On macOS show a proxy icon in the title bar of editor
|
||||
windows to match platform behaviour.
|
||||
* Tools/Demos
|
||||
- gh-113516: Don’t set LDSHARED when building for WASI.
|
||||
* C API
|
||||
- gh-117021: Fix integer overflow in PyLong_AsPid() on non-Windows
|
||||
64-bit platforms.
|
||||
|
||||
-------------------------------------------------------------------
|
||||
Sun Mar 24 07:51:45 UTC 2024 - Matej Cepl <mcepl@cepl.eu>
|
||||
|
||||
- Add reference to CVE-2024-0450 (bsc#1221854) to changelog.
|
||||
|
||||
-------------------------------------------------------------------
|
||||
Fri Mar 22 21:22:27 UTC 2024 - Matej Cepl <mcepl@cepl.eu>
|
||||
@ -73,7 +518,7 @@ Fri Feb 23 01:06:42 UTC 2024 - Matej Cepl <mcepl@suse.com>
|
||||
Tue Feb 20 22:14:02 UTC 2024 - Matej Cepl <mcepl@cepl.eu>
|
||||
|
||||
- Remove double definition of /usr/bin/idle%%{version} in
|
||||
%%files.
|
||||
%%files.
|
||||
|
||||
-------------------------------------------------------------------
|
||||
Thu Feb 15 10:29:07 UTC 2024 - Daniel Garcia <daniel.garcia@suse.com>
|
||||
@ -240,7 +685,7 @@ Thu Feb 8 07:27:40 UTC 2024 - Daniel Garcia <daniel.garcia@suse.com>
|
||||
tkinter._test().
|
||||
- gh-109858: Protect zipfile from “quoted-overlap” zipbomb. It now
|
||||
raises BadZipFile when try to read an entry that overlaps with
|
||||
other entry or central directory.
|
||||
other entry or central directory (bsc#1221854, CVE-2024-0450).
|
||||
- gh-38807: Fix race condition in trace. Instead of checking if a
|
||||
directory exists and creating it, directly call os.makedirs()
|
||||
with the kwarg exist_ok=True.
|
||||
@ -331,7 +776,8 @@ Thu Feb 8 07:27:40 UTC 2024 - Daniel Garcia <daniel.garcia@suse.com>
|
||||
METH_FASTCALL | METH_KEYWORDS calling convention. Only the
|
||||
positional parameter count was checked; any keyword argument
|
||||
passed would be silently accepted.
|
||||
|
||||
- Remove upstreamed patches:
|
||||
- CVE-2024-0450-zipfile-avoid-quoted-overlap-zipbomb.patch
|
||||
- Refresh all patches:
|
||||
- CVE-2023-27043-email-parsing-errors.patch
|
||||
- F00251-change-user-install-location.patch
|
||||
@ -992,12 +1438,12 @@ Wed Sep 6 07:52:11 UTC 2023 - Daniel Garcia <daniel.garcia@suse.com>
|
||||
-------------------------------------------------------------------
|
||||
Thu Aug 10 09:33:26 UTC 2023 - Dirk Müller <dmueller@suse.com>
|
||||
|
||||
- restrict PEP668 to ALP/Tumbleweed
|
||||
- restrict PEP668 to ALP/Tumbleweed
|
||||
|
||||
-------------------------------------------------------------------
|
||||
Fri Aug 4 06:37:41 UTC 2023 - Dirk Müller <dmueller@suse.com>
|
||||
|
||||
- add externally_managed.in to label this build as PEP-668 managed
|
||||
- add externally_managed.in to label this build as PEP-668 managed
|
||||
|
||||
-------------------------------------------------------------------
|
||||
Thu Aug 3 14:53:38 UTC 2023 - Matej Cepl <mcepl@suse.com>
|
||||
@ -2352,7 +2798,7 @@ Sat Mar 26 22:52:45 UTC 2022 - Matej Cepl <mcepl@suse.com>
|
||||
Tue Feb 22 05:53:06 UTC 2022 - Steve Kowalik <steven.kowalik@suse.com>
|
||||
|
||||
- Add patch support-expat-245.patch:
|
||||
* Support Expat >= 2.4.5
|
||||
* Support Expat >= 2.4.5
|
||||
|
||||
-------------------------------------------------------------------
|
||||
Tue Feb 15 23:05:55 UTC 2022 - Matej Cepl <mcepl@suse.com>
|
||||
@ -2542,7 +2988,7 @@ Sat Jun 5 21:21:38 UTC 2021 - Matej Cepl <mcepl@suse.com>
|
||||
-------------------------------------------------------------------
|
||||
Fri Jun 4 21:36:30 UTC 2021 - Dirk Müller <dmueller@suse.com>
|
||||
|
||||
- allow build with Sphinx >= 3.x
|
||||
- allow build with Sphinx >= 3.x
|
||||
|
||||
-------------------------------------------------------------------
|
||||
Wed Jun 2 13:12:04 UTC 2021 - Dan Čermák <dcermak@suse.com>
|
||||
@ -3094,7 +3540,7 @@ Sat Dec 12 14:29:33 UTC 2020 - Matej Cepl <mcepl@suse.com>
|
||||
Thu Dec 10 00:26:51 UTC 2020 - Benjamin Greiner <code@bnavigator.de>
|
||||
|
||||
- Last try before this results in an editwar:
|
||||
* remove importlib_resources and importlib-metadata
|
||||
* remove importlib_resources and importlib-metadata
|
||||
provides/obsoletes
|
||||
* import importlib_resources is not the same as
|
||||
import importlib.resources, same for metadata
|
||||
@ -3211,54 +3657,54 @@ Tue Jul 21 09:53:06 UTC 2020 - Callum Farmer <callumjfarmer13@gmail.com>
|
||||
- Removed CVE-2019-20907_tarfile-inf-loop.patch: fixed in upstream
|
||||
- Removed recursion.tar: contained in upstream
|
||||
- Update to 3.9.0b5:
|
||||
- bpo-41304: Fixes python3x._pth being ignored on Windows, caused
|
||||
- bpo-41304: Fixes python3x._pth being ignored on Windows, caused
|
||||
by the fix for bpo-29778 (CVE-2020-15801).
|
||||
- bpo-41162: Audit hooks are now cleared later during
|
||||
finalization to avoid missing events.
|
||||
- bpo-29778: Ensure python3.dll is loaded from correct locations
|
||||
- bpo-29778: Ensure python3.dll is loaded from correct locations
|
||||
when Python is embedded (CVE-2020-15523).
|
||||
- bpo-39603: Prevent http header injection by rejecting control
|
||||
- bpo-39603: Prevent http header injection by rejecting control
|
||||
characters in http.client.putrequest(…).
|
||||
- bpo-41295: Resolve a regression in CPython 3.8.4 where defining
|
||||
“__setattr__” in a multi-inheritance setup and
|
||||
“__setattr__” in a multi-inheritance setup and
|
||||
calling up the hierarchy chain could fail if builtins/extension
|
||||
types were involved in the base types.
|
||||
- bpo-41247: Always cache the running loop holder when running
|
||||
- bpo-41247: Always cache the running loop holder when running
|
||||
asyncio.set_running_loop.
|
||||
- bpo-41252: Fix incorrect refcounting in
|
||||
- bpo-41252: Fix incorrect refcounting in
|
||||
_ssl.c’s _servername_callback().
|
||||
- bpo-41215: Use non-NULL default values in the PEG parser
|
||||
- bpo-41215: Use non-NULL default values in the PEG parser
|
||||
keyword list to overcome a bug that was
|
||||
preventing Python from being properly compiled when using the
|
||||
XLC compiler. Patch by Pablo Galindo.
|
||||
- bpo-41218: Python 3.8.3 had a regression where compiling with
|
||||
ast.PyCF_ALLOW_TOP_LEVEL_AWAIT would
|
||||
- bpo-41218: Python 3.8.3 had a regression where compiling with
|
||||
ast.PyCF_ALLOW_TOP_LEVEL_AWAIT would
|
||||
aggressively mark list comprehension with CO_COROUTINE. Now only
|
||||
list comprehension making use of async/await will tagged as so.
|
||||
- bpo-41175: Guard against a NULL pointer dereference within
|
||||
- bpo-41175: Guard against a NULL pointer dereference within
|
||||
bytearrayobject triggered by the bytearray() + bytearray() operation.
|
||||
- bpo-39960: The “hackcheck” that prevents sneaking around a type’s
|
||||
__setattr__() by calling the superclass method was
|
||||
- bpo-39960: The “hackcheck” that prevents sneaking around a type’s
|
||||
__setattr__() by calling the superclass method was
|
||||
rewritten to allow C implemented heap types.
|
||||
- bpo-41288: Unpickling invalid NEWOBJ_EX opcode with the
|
||||
- bpo-41288: Unpickling invalid NEWOBJ_EX opcode with the
|
||||
C implementation raises now UnpicklingError instead of crashing.
|
||||
- bpo-39017: Avoid infinite loop when reading specially crafted
|
||||
- bpo-39017: Avoid infinite loop when reading specially crafted
|
||||
TAR files using the tarfile module (CVE-2019-20907, bsc#1174091).
|
||||
- bpo-41235: Fix the error handling in ssl.SSLContext.load_dh_params().
|
||||
- bpo-41207: In distutils.spawn, restore expectation that
|
||||
- bpo-41207: In distutils.spawn, restore expectation that
|
||||
DistutilsExecError is raised when the command is not found.
|
||||
- bpo-39168: Remove the __new__ method of typing.Generic.
|
||||
- bpo-41194: Fix a crash in the _ast module: it can no longer be
|
||||
- bpo-41194: Fix a crash in the _ast module: it can no longer be
|
||||
loaded more than once. It now uses a global state rather than a module state.
|
||||
- bpo-39384: Fixed email.contentmanager to allow set_content() to set a
|
||||
- bpo-39384: Fixed email.contentmanager to allow set_content() to set a
|
||||
null string.
|
||||
- bpo-41300: Save files with non-ascii chars.
|
||||
- bpo-41300: Save files with non-ascii chars.
|
||||
Fix regression released in 3.9.0b4 and 3.8.4.
|
||||
- bpo-37765: Add keywords to module name completion list.
|
||||
- bpo-37765: Add keywords to module name completion list.
|
||||
Rewrite Completions section of IDLE doc.
|
||||
- bpo-40170: Revert PyType_HasFeature() change: it reads
|
||||
again directly the PyTypeObject.tp_flags
|
||||
member when the limited C API is not used, rather than always calling
|
||||
- bpo-40170: Revert PyType_HasFeature() change: it reads
|
||||
again directly the PyTypeObject.tp_flags
|
||||
member when the limited C API is not used, rather than always calling
|
||||
PyType_GetFlags() which hides implementation details.
|
||||
|
||||
-------------------------------------------------------------------
|
||||
@ -3779,7 +4225,7 @@ Wed Jun 5 12:19:09 CEST 2019 - Matej Cepl <mcepl@suse.com>
|
||||
pickling costs between processes
|
||||
- typed_ast is merged back to CPython
|
||||
- LOAD_GLOBAL is now 40% faster
|
||||
- pickle now uses Protocol 4 by default, improving performance
|
||||
- pickle now uses Protocol 4 by default, improving performance
|
||||
- Remove patches which were included in the upstream:
|
||||
- 00251-change-user-install-location.patch
|
||||
- 00316-mark-bdist_wininst-unsupported.patch
|
||||
@ -3924,7 +4370,7 @@ Mon Dec 17 17:24:49 CET 2018 - mcepl@suse.com
|
||||
|
||||
- Upgrade to 3.7.2rc1:
|
||||
* bugfix release, for the full list of all changes see
|
||||
https://docs.python.org/3.7/whatsnew/changelog.html#changelog
|
||||
https://docs.python.org/3.7/whatsnew/changelog.html#changelog
|
||||
- Make run of the test suite more verbose
|
||||
|
||||
-------------------------------------------------------------------
|
||||
@ -4351,7 +4797,7 @@ Mon Mar 13 14:04:22 UTC 2017 - jmatejek@suse.com
|
||||
Sat Feb 25 20:55:57 UTC 2017 - bwiedemann@suse.com
|
||||
|
||||
- Add 0001-allow-for-reproducible-builds-of-python-packages.patch
|
||||
upstream https://github.com/python/cpython/pull/296
|
||||
upstream https://github.com/python/cpython/pull/296
|
||||
|
||||
-------------------------------------------------------------------
|
||||
Wed Feb 8 12:30:20 UTC 2017 - jmatejek@suse.com
|
||||
@ -4417,7 +4863,7 @@ Mon Mar 7 20:38:11 UTC 2016 - toddrme2178@gmail.com
|
||||
|
||||
- Add Python-3.5.1-fix_lru_cache_copying.patch
|
||||
Fix copying the lru_cache() wrapper object.
|
||||
Fixes deep-copying lru_cache regression, which worked on
|
||||
Fixes deep-copying lru_cache regression, which worked on
|
||||
previous versions of python but fails on python 3.5.
|
||||
This fixes a bunch of packages in devel:languages:python3.
|
||||
See: https://bugs.python.org/issue25447
|
||||
@ -4555,7 +5001,7 @@ Sun Jan 11 13:01:30 UTC 2015 - p.drouand@gmail.com
|
||||
-------------------------------------------------------------------
|
||||
Sat Oct 18 20:14:54 UTC 2014 - crrodriguez@opensuse.org
|
||||
|
||||
- Only pkgconfig(x11) is required for build, not the whole
|
||||
- Only pkgconfig(x11) is required for build, not the whole
|
||||
set of packages provided by xorg-x11-devel metapackage.
|
||||
|
||||
-------------------------------------------------------------------
|
||||
@ -4615,7 +5061,7 @@ Wed Mar 26 15:24:46 UTC 2014 - jmatejek@suse.com
|
||||
-------------------------------------------------------------------
|
||||
Mon Mar 24 17:29:31 UTC 2014 - dmueller@suse.com
|
||||
|
||||
- remove blacklisting of test_posix on aarch64: qemu bug is fixed
|
||||
- remove blacklisting of test_posix on aarch64: qemu bug is fixed
|
||||
|
||||
-------------------------------------------------------------------
|
||||
Mon Mar 17 18:26:58 UTC 2014 - jmatejek@suse.com
|
||||
@ -4718,7 +5164,7 @@ Tue Nov 19 14:28:41 UTC 2013 - jmatejek@suse.com
|
||||
-------------------------------------------------------------------
|
||||
Tue Oct 15 17:44:08 UTC 2013 - crrodriguez@opensuse.org
|
||||
|
||||
- build with -DOPENSSL_LOAD_CONF for the same reasons
|
||||
- build with -DOPENSSL_LOAD_CONF for the same reasons
|
||||
described in the python2 package.
|
||||
|
||||
-------------------------------------------------------------------
|
||||
@ -4730,7 +5176,7 @@ Fri Aug 16 11:35:15 UTC 2013 - jmatejek@suse.com
|
||||
-------------------------------------------------------------------
|
||||
Thu Aug 8 14:54:49 UTC 2013 - dvaleev@suse.com
|
||||
|
||||
- Exclue test_faulthandler from tests on powerpc due to bnc#831629
|
||||
- Exclue test_faulthandler from tests on powerpc due to bnc#831629
|
||||
|
||||
-------------------------------------------------------------------
|
||||
Thu Jun 13 15:05:34 UTC 2013 - jmatejek@suse.com
|
||||
@ -4789,7 +5235,7 @@ Fri Mar 1 07:42:21 UTC 2013 - dmueller@suse.com
|
||||
|
||||
- add ctypes-libffi-aarch64.patch:
|
||||
* import aarch64 support for libffi in _ctypes module
|
||||
- add aarch64 to the list of lib64 based archs
|
||||
- add aarch64 to the list of lib64 based archs
|
||||
- add movetogetdents64.diff:
|
||||
* port to getdents64, as SYS_getdents is not implemented everywhere
|
||||
|
||||
@ -4843,9 +5289,9 @@ Mon Oct 29 18:21:45 UTC 2012 - dmueller@suse.com
|
||||
-------------------------------------------------------------------
|
||||
Thu Oct 25 08:14:36 UTC 2012 - Rene.vanPaassen@gmail.com
|
||||
|
||||
- exclude test_math for SLE 11; math library fails on negative
|
||||
- exclude test_math for SLE 11; math library fails on negative
|
||||
gamma function values close to integers and 0, probably
|
||||
due to imprecision in -lm on SLE_11_SP2.
|
||||
due to imprecision in -lm on SLE_11_SP2.
|
||||
|
||||
-------------------------------------------------------------------
|
||||
Tue Oct 16 12:15:34 UTC 2012 - coolo@suse.com
|
||||
@ -4869,7 +5315,7 @@ Mon Oct 1 08:53:03 UTC 2012 - idonmez@suse.com
|
||||
-------------------------------------------------------------------
|
||||
Thu Sep 27 12:35:01 UTC 2012 - idonmez@suse.com
|
||||
|
||||
- Correct dependency for python3-testsuite,
|
||||
- Correct dependency for python3-testsuite,
|
||||
python3-tkinter -> python3-tk
|
||||
|
||||
-------------------------------------------------------------------
|
||||
@ -4902,7 +5348,7 @@ Fri Aug 3 12:09:34 UTC 2012 - jmatejek@suse.com
|
||||
-------------------------------------------------------------------
|
||||
Fri Jul 27 09:02:41 UTC 2012 - dvaleev@suse.com
|
||||
|
||||
- skip test_io on ppc
|
||||
- skip test_io on ppc
|
||||
- drop test_io ppc patch
|
||||
|
||||
-------------------------------------------------------------------
|
||||
@ -4951,8 +5397,8 @@ Wed Jan 18 15:49:47 UTC 2012 - jmatejek@suse.com
|
||||
-------------------------------------------------------------------
|
||||
Sun Dec 25 13:25:01 UTC 2011 - idonmez@suse.com
|
||||
|
||||
- Use system ffi, included one is broken see
|
||||
http://bugs.python.org/issue11729 and
|
||||
- Use system ffi, included one is broken see
|
||||
http://bugs.python.org/issue11729 and
|
||||
http://bugs.python.org/issue12081
|
||||
|
||||
-------------------------------------------------------------------
|
||||
|
@ -36,6 +36,12 @@
|
||||
%bcond_without general
|
||||
%endif
|
||||
|
||||
%if 0%{?do_profiling}
|
||||
%bcond_without profileopt
|
||||
%else
|
||||
%bcond_with profileopt
|
||||
%endif
|
||||
|
||||
%define python_pkg_name python311
|
||||
%if "%{python_pkg_name}" == "%{primary_python}"
|
||||
%define primary_interpreter 1
|
||||
@ -94,13 +100,13 @@
|
||||
%define dynlib() %{sitedir}/lib-dynload/%{1}.cpython-%{abi_tag}-%{archname}-%{_os}%{?_gnu}%{?armsuffix}.so
|
||||
%bcond_without profileopt
|
||||
Name: %{python_pkg_name}%{psuffix}
|
||||
Version: 3.11.8
|
||||
Version: 3.11.11
|
||||
Release: 0
|
||||
Summary: Python 3 Interpreter
|
||||
License: Python-2.0
|
||||
URL: https://www.python.org/
|
||||
Source0: https://www.python.org/ftp/python/%{folderversion}/%{tarname}.tar.xz
|
||||
Source1: https://www.python.org/ftp/python/%{folderversion}/%{tarname}.tar.xz.asc
|
||||
Source1: https://www.python.org/ftp/python/%{folderversion}/%{tarname}.tar.xz.sigstore
|
||||
Source2: baselibs.conf
|
||||
Source3: README.SUSE
|
||||
Source4: externally_managed.in
|
||||
@ -127,7 +133,7 @@ Source99: python.keyring
|
||||
Source100: PACKAGING-NOTES
|
||||
# PATCH-FEATURE-UPSTREAM F00251-change-user-install-location.patch bsc#[0-9]+ mcepl@suse.com
|
||||
# Fix installation in /usr/local (boo#1071941), originally from Fedora
|
||||
# https://src.fedoraproject.org/rpms/python3/blob/master/f/00251-change-user-install-location.patch
|
||||
# https://src.fedoraproject.org/rpms/python3.12/blob/rawhide/f/00251-change-user-install-location.patch
|
||||
# Set values of prefix and exec_prefix in distutils install command
|
||||
# to /usr/local if executable is /usr/bin/python* and RPM build
|
||||
# is not detected to make pip and distutils install into separate location
|
||||
@ -158,42 +164,23 @@ Patch11: fix_configure_rst.patch
|
||||
# PATCH-FIX-UPSTREAM skip_if_buildbot-extend.patch gh#python/cpython#103053 mcepl@suse.com
|
||||
# Skip test_freeze_simple_script
|
||||
Patch13: skip_if_buildbot-extend.patch
|
||||
# PATCH-FIX-UPSTREAM CVE-2023-27043-email-parsing-errors.patch bsc#1210638 mcepl@suse.com
|
||||
# Detect email address parsing errors and return empty tuple to
|
||||
# indicate the parsing error (old API)
|
||||
Patch14: CVE-2023-27043-email-parsing-errors.patch
|
||||
# PATCH-FIX-UPSTREAM CVE-2023-6597-TempDir-cleaning-symlink.patch bsc#1219666 mcepl@suse.com
|
||||
# tempfile.TemporaryDirectory: fix symlink bug in cleanup (from gh#python/cpython!99930)
|
||||
Patch15: CVE-2023-6597-TempDir-cleaning-symlink.patch
|
||||
# PATCH-FIX-UPSTREAM bsc1221260-test_asyncio-ResourceWarning.patch bsc#1221260 mcepl@suse.com
|
||||
# prevent ResourceWarning in test_asyncio tests
|
||||
Patch16: bsc1221260-test_asyncio-ResourceWarning.patch
|
||||
Patch15: bsc1221260-test_asyncio-ResourceWarning.patch
|
||||
# PATCH-FIX-OPENSUSE CVE-2023-52425-libexpat-2.6.0-backport.patch
|
||||
# This problem on libexpat is patched on SLE without version
|
||||
# update, this patch changes the tests to match the libexpat provided
|
||||
# by SUSE
|
||||
Patch17: CVE-2023-52425-libexpat-2.6.0-backport.patch
|
||||
# PATCH-FIX-UPSTREAM CVE-2024-4032-private-IP-addrs.patch bsc#1226448 mcepl@suse.com
|
||||
# rearrange definition of private v global IP addresses
|
||||
Patch18: CVE-2024-4032-private-IP-addrs.patch
|
||||
# PATCH-FIX-UPSTREAM CVE-2024-0450-zipfile-avoid-quoted-overlap-zipbomb.patch bsc#1221854 mcepl@suse.com
|
||||
# detecting the vulnerability of the "quoted-overlap" zipbomb
|
||||
Patch19: CVE-2024-0450-zipfile-avoid-quoted-overlap-zipbomb.patch
|
||||
# PATCH-FIX-UPSTREAM CVE-2024-0397-memrace_ssl.SSLContext_cert_store.patch bsc#1226447 mcepl@suse.com
|
||||
# removes memory race condition in ssl.SSLContext certificate store methods
|
||||
Patch20: CVE-2024-0397-memrace_ssl.SSLContext_cert_store.patch
|
||||
# PATCH-FIX-UPSTREAM CVE-2024-6923-email-hdr-inject.patch bsc#1228780 mcepl@suse.com
|
||||
# prevent email header injection, patch from gh#python/cpython!122608
|
||||
Patch21: CVE-2024-6923-email-hdr-inject.patch
|
||||
# PATCH-FIX-UPSTREAM CVE-2024-8088-zipfile-Path-sanitization.patch bsc#1229704 mcepl@suse.com
|
||||
# sanitizing names in zipfile.Path
|
||||
Patch22: CVE-2024-8088-zipfile-Path-sanitization.patch
|
||||
# PATCH-FIX-UPSTREAM CVE-2024-6232-ReDOS-backtrack-tarfile.patch bsc#1230227 mcepl@suse.com
|
||||
# removing backtracking when parsing tarfile headers
|
||||
Patch23: CVE-2024-6232-ReDOS-backtrack-tarfile.patch
|
||||
# PATCH-FIX-UPSTREAM CVE-2024-7592-quad-complex-cookies.patch bsc#1229596 mcepl@suse.com
|
||||
# fixing quadratic complexity in parsing "-quoted cookie values with backslashes
|
||||
Patch24: CVE-2024-7592-quad-complex-cookies.patch
|
||||
Patch16: CVE-2023-52425-libexpat-2.6.0-backport.patch
|
||||
Patch17: CVE-2023-52425-remove-reparse_deferral-tests.patch
|
||||
# PATCH-FIX-UPSTREAM bso1227999-reproducible-builds.patch bsc#1227999 mcepl@suse.com
|
||||
# reproducibility patches
|
||||
Patch19: bso1227999-reproducible-builds.patch
|
||||
# PATCH-FIX-UPSTREAM gh120226-fix-sendfile-test-kernel-610.patch gh#python/cpython#120226 mcepl@suse.com
|
||||
# Fix test_sendfile_close_peer_in_the_middle_of_receiving on Linux >= 6.10 (GH-120227)
|
||||
Patch22: gh120226-fix-sendfile-test-kernel-610.patch
|
||||
# PATCH-FIX-UPSTREAM Add platform triplets for 64-bit LoongArch gh#python/cpython#30939 glaubitz@suse.com
|
||||
Patch24: add-loongarch64-support.patch
|
||||
BuildRequires: autoconf-archive
|
||||
BuildRequires: automake
|
||||
BuildRequires: fdupes
|
||||
@ -235,7 +222,6 @@ BuildRequires: gettext
|
||||
BuildRequires: readline-devel
|
||||
BuildRequires: sqlite-devel
|
||||
BuildRequires: timezone
|
||||
BuildRequires: update-desktop-files
|
||||
BuildRequires: pkgconfig(ncurses)
|
||||
BuildRequires: pkgconfig(tk)
|
||||
BuildRequires: pkgconfig(x11)
|
||||
@ -436,13 +422,28 @@ other applications.
|
||||
|
||||
%prep
|
||||
%setup -q -n %{tarname}
|
||||
%autopatch -p1 -M 08
|
||||
|
||||
%patch -p1 -P 02
|
||||
%patch -p1 -P 03
|
||||
%patch -p1 -P 04
|
||||
%patch -p1 -P 05
|
||||
%patch -p1 -P 06
|
||||
%patch -p1 -P 07
|
||||
%patch -p1 -P 08
|
||||
|
||||
%if 0%{?suse_version} <= 1500
|
||||
%patch -P 09 -p1
|
||||
%endif
|
||||
|
||||
%autopatch -p1 -m 10
|
||||
%patch -p1 -P 10
|
||||
%patch -p1 -P 11
|
||||
%patch -p1 -P 13
|
||||
%patch -p1 -P 15
|
||||
%patch -p1 -P 16
|
||||
%patch -p1 -P 17
|
||||
%patch -p1 -P 19
|
||||
%patch -p1 -P 22
|
||||
%patch -p1 -P 24
|
||||
|
||||
# drop Autoconf version requirement
|
||||
sed -i 's/^AC_PREREQ/dnl AC_PREREQ/' configure.ac
|
||||
@ -684,7 +685,6 @@ done
|
||||
cp %{SOURCE19} idle%{python_version}.desktop
|
||||
sed -i -e 's:idle3:idle%{python_version}:g' idle%{python_version}.desktop
|
||||
install -m 644 -D -t %{buildroot}%{_datadir}/applications idle%{python_version}.desktop
|
||||
%suse_update_desktop_file idle%{python_version}
|
||||
|
||||
cp %{SOURCE20} idle%{python_version}.appdata.xml
|
||||
sed -i -e 's:idle3.desktop:idle%{python_version}.desktop:g' idle%{python_version}.appdata.xml
|
||||
@ -767,6 +767,9 @@ install -m 755 -D Tools/gdb/libpython.py %{buildroot}%{_datadir}/gdb/auto-load/%
|
||||
# install devel files to /config
|
||||
#cp Makefile Makefile.pre.in Makefile.pre $RPM_BUILD_ROOT%%{sitedir}/config-%%{python_abi}/
|
||||
|
||||
# Remove -IVendor/ from python-config boo#1231795
|
||||
sed -i 's/-IVendor\///' %{buildroot}%{_bindir}/python%{python_abi}-config
|
||||
|
||||
# RPM macros
|
||||
%if %{primary_interpreter}
|
||||
mkdir -p %{buildroot}%{_rpmconfigdir}/macros.d/
|
||||
@ -795,6 +798,11 @@ LD_LIBRARY_PATH=. ./python -O -c "from py_compile import compile; compile('$FAIL
|
||||
echo %{sitedir}/_import_failed > %{buildroot}/%{sitedir}/site-packages/zzzz-import-failed-hooks.pth
|
||||
%endif
|
||||
|
||||
# For the purposes of reproducibility, it is necessary to eliminate any *.pyc files inside documentation dirs
|
||||
if [ -d %{buildroot}%{_defaultdocdir} ] ; then
|
||||
find %{buildroot}%{_defaultdocdir} -type f -name \*.pyc -ls -exec rm -vf '{}' \;
|
||||
fi
|
||||
|
||||
%if %{with general}
|
||||
%files -n %{python_pkg_name}-tk
|
||||
%{sitedir}/tkinter
|
||||
|
@ -2,11 +2,9 @@
Lib/test/support/__init__.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)

Index: Python-3.11.8/Lib/test/support/__init__.py
===================================================================
--- Python-3.11.8.orig/Lib/test/support/__init__.py
+++ Python-3.11.8/Lib/test/support/__init__.py
@@ -383,7 +383,7 @@ def skip_if_buildbot(reason=None):
--- a/Lib/test/support/__init__.py
+++ b/Lib/test/support/__init__.py
@@ -384,7 +384,7 @@ def skip_if_buildbot(reason=None):
if not reason:
reason = 'not suitable for buildbots'
try: