Compare commits
4 Commits
| Author | SHA256 | Date |
|---|---|---|
| | e035510961 | |
| | 240ba2dc31 | |
| | 7a15e51a04 | |
| | 5984203f71 | |
CVE-2025-4565.patch (Normal file, 256 lines)
@@ -0,0 +1,256 @@
From 1e7f83ea1b1945065ce1b89051cd655e4b8de22d Mon Sep 17 00:00:00 2001
From: Protobuf Team Bot <protobuf-github-bot@google.com>
Date: Tue, 13 May 2025 14:42:18 -0700
Subject: [PATCH 2/2] Add recursion depth limits to pure python

PiperOrigin-RevId: 758382549
---
python/google/protobuf/internal/decoder.py | 35 ++++++++++-
.../google/protobuf/internal/decoder_test.py | 14 +++++
.../google/protobuf/internal/message_test.py | 60 +++++++++++++++++--
.../protobuf/internal/self_recursive.proto | 1 +
4 files changed, 105 insertions(+), 5 deletions(-)

diff --git a/python/google/protobuf/internal/decoder.py b/python/google/protobuf/internal/decoder.py
index 89d829142..de0bc19a5 100755
--- a/python/google/protobuf/internal/decoder.py
+++ b/python/google/protobuf/internal/decoder.py
@@ -668,7 +668,13 @@ def GroupDecoder(field_number, is_repeated, is_packed, key, new_default):
if value is None:
value = field_dict.setdefault(key, new_default(message))
# Read sub-message.
+ current_depth += 1
+ if current_depth > _recursion_limit:
+ raise _DecodeError(
+ 'Error parsing message: too many levels of nesting.'
+ )
pos = value.add()._InternalParse(buffer, pos, end, current_depth)
+ current_depth -= 1
# Read end tag.
new_pos = pos+end_tag_len
if buffer[pos:new_pos] != end_tag_bytes or new_pos > end:
@@ -687,7 +693,11 @@ def GroupDecoder(field_number, is_repeated, is_packed, key, new_default):
if value is None:
value = field_dict.setdefault(key, new_default(message))
# Read sub-message.
+ current_depth += 1
+ if current_depth > _recursion_limit:
+ raise _DecodeError('Error parsing message: too many levels of nesting.')
pos = value._InternalParse(buffer, pos, end, current_depth)
+ current_depth -= 1
# Read end tag.
new_pos = pos+end_tag_len
if buffer[pos:new_pos] != end_tag_bytes or new_pos > end:
@@ -720,6 +730,11 @@ def MessageDecoder(field_number, is_repeated, is_packed, key, new_default):
if new_pos > end:
raise _DecodeError('Truncated message.')
# Read sub-message.
+ current_depth += 1
+ if current_depth > _recursion_limit:
+ raise _DecodeError(
+ 'Error parsing message: too many levels of nesting.'
+ )
if (
value.add()._InternalParse(buffer, pos, new_pos, current_depth)
!= new_pos
@@ -727,6 +742,7 @@ def MessageDecoder(field_number, is_repeated, is_packed, key, new_default):
# The only reason _InternalParse would return early is if it
# encountered an end-group tag.
raise _DecodeError('Unexpected end-group tag.')
+ current_depth -= 1
# Predict that the next tag is another copy of the same repeated field.
pos = new_pos + tag_len
if buffer[new_pos:pos] != tag_bytes or new_pos == end:
@@ -746,10 +762,14 @@ def MessageDecoder(field_number, is_repeated, is_packed, key, new_default):
if new_pos > end:
raise _DecodeError('Truncated message.')
# Read sub-message.
+ current_depth += 1
+ if current_depth > _recursion_limit:
+ raise _DecodeError('Error parsing message: too many levels of nesting.')
if value._InternalParse(buffer, pos, new_pos, current_depth) != new_pos:
# The only reason _InternalParse would return early is if it encountered
# an end-group tag.
raise _DecodeError('Unexpected end-group tag.')
+ current_depth -= 1
return new_pos

return DecodeField
@@ -984,6 +1004,15 @@ def _SkipGroup(buffer, pos, end):
pos = new_pos


+DEFAULT_RECURSION_LIMIT = 100
+_recursion_limit = DEFAULT_RECURSION_LIMIT
+
+
+def SetRecursionLimit(new_limit):
+ global _recursion_limit
+ _recursion_limit = new_limit
+
+
def _DecodeUnknownFieldSet(buffer, pos, end_pos=None, current_depth=0):
"""Decode UnknownFieldSet. Returns the UnknownFieldSet and new position."""

@@ -1017,7 +1046,11 @@ def _DecodeUnknownField(
data = buffer[pos:pos+size].tobytes()
pos += size
elif wire_type == wire_format.WIRETYPE_START_GROUP:
- (data, pos) = _DecodeUnknownFieldSet(buffer, pos, None, current_depth)
+ current_depth += 1
+ if current_depth >= _recursion_limit:
+ raise _DecodeError('Error parsing message: too many levels of nesting.')
+ data, pos = _DecodeUnknownFieldSet(buffer, pos, None, current_depth)
+ current_depth -= 1
elif wire_type == wire_format.WIRETYPE_END_GROUP:
return (0, -1)
else:
diff --git a/python/google/protobuf/internal/decoder_test.py b/python/google/protobuf/internal/decoder_test.py
index f801b6e76..11e6465b6 100644
--- a/python/google/protobuf/internal/decoder_test.py
+++ b/python/google/protobuf/internal/decoder_test.py
@@ -11,8 +11,10 @@
import io
import unittest

+from google.protobuf import message
from google.protobuf.internal import decoder
from google.protobuf.internal import testing_refleaks
+from google.protobuf.internal import wire_format


_INPUT_BYTES = b'\x84r\x12'
@@ -52,6 +54,18 @@ class DecoderTest(unittest.TestCase):
size = decoder._DecodeVarint(input_io)
self.assertEqual(size, None)

+ def test_decode_unknown_group_field_too_many_levels(self):
+ data = memoryview(b'\023' * 5_000_000)
+ self.assertRaisesRegex(
+ message.DecodeError,
+ 'Error parsing message',
+ decoder._DecodeUnknownField,
+ data,
+ 1,
+ wire_format.WIRETYPE_START_GROUP,
+ 1
+ )
+

if __name__ == '__main__':
unittest.main()
diff --git a/python/google/protobuf/internal/message_test.py b/python/google/protobuf/internal/message_test.py
index 48e6df806..6facb8135 100755
--- a/python/google/protobuf/internal/message_test.py
+++ b/python/google/protobuf/internal/message_test.py
@@ -36,6 +36,7 @@ from google.protobuf.internal import enum_type_wrapper
from google.protobuf.internal import more_extensions_pb2
from google.protobuf.internal import more_messages_pb2
from google.protobuf.internal import packed_field_test_pb2
+from google.protobuf.internal import self_recursive_pb2
from google.protobuf.internal import test_proto3_optional_pb2
from google.protobuf.internal import test_util
from google.protobuf.internal import testing_refleaks
@@ -1339,6 +1340,52 @@ class MessageTest(unittest.TestCase):
self.assertNotIn('oneof_string', m)


+@testing_refleaks.TestCase
+class TestRecursiveGroup(unittest.TestCase):
+
+ def _MakeRecursiveGroupMessage(self, n):
+ msg = self_recursive_pb2.SelfRecursive()
+ sub = msg
+ for _ in range(n):
+ sub = sub.sub_group
+ sub.i = 1
+ return msg.SerializeToString()
+
+ def testRecursiveGroups(self):
+ recurse_msg = self_recursive_pb2.SelfRecursive()
+ data = self._MakeRecursiveGroupMessage(100)
+ recurse_msg.ParseFromString(data)
+ self.assertTrue(recurse_msg.HasField('sub_group'))
+
+ def testRecursiveGroupsException(self):
+ if api_implementation.Type() != 'python':
+ api_implementation._c_module.SetAllowOversizeProtos(False)
+ recurse_msg = self_recursive_pb2.SelfRecursive()
+ data = self._MakeRecursiveGroupMessage(300)
+ with self.assertRaises(message.DecodeError) as context:
+ recurse_msg.ParseFromString(data)
+ self.assertIn('Error parsing message', str(context.exception))
+ if api_implementation.Type() == 'python':
+ self.assertIn('too many levels of nesting', str(context.exception))
+
+ def testRecursiveGroupsUnknownFields(self):
+ if api_implementation.Type() != 'python':
+ api_implementation._c_module.SetAllowOversizeProtos(False)
+ test_msg = unittest_pb2.TestAllTypes()
+ data = self._MakeRecursiveGroupMessage(300) # unknown to test_msg
+ with self.assertRaises(message.DecodeError) as context:
+ test_msg.ParseFromString(data)
+ self.assertIn(
+ 'Error parsing message',
+ str(context.exception),
+ )
+ if api_implementation.Type() == 'python':
+ self.assertIn('too many levels of nesting', str(context.exception))
+ decoder.SetRecursionLimit(310)
+ test_msg.ParseFromString(data)
+ decoder.SetRecursionLimit(decoder.DEFAULT_RECURSION_LIMIT)
+
+
# Class to test proto2-only features (required, extensions, etc.)
@testing_refleaks.TestCase
class Proto2Test(unittest.TestCase):
@@ -2722,8 +2769,6 @@ class PackedFieldTest(unittest.TestCase):
self.assertEqual(golden_data, message.SerializeToString())


-@unittest.skipIf(api_implementation.Type() == 'python',
- 'explicit tests of the C++ implementation')
@testing_refleaks.TestCase
class OversizeProtosTest(unittest.TestCase):

@@ -2740,16 +2785,23 @@ class OversizeProtosTest(unittest.TestCase):
msg.ParseFromString(self.GenerateNestedProto(100))

def testAssertOversizeProto(self):
- api_implementation._c_module.SetAllowOversizeProtos(False)
+ if api_implementation.Type() != 'python':
+ api_implementation._c_module.SetAllowOversizeProtos(False)
msg = unittest_pb2.TestRecursiveMessage()
with self.assertRaises(message.DecodeError) as context:
msg.ParseFromString(self.GenerateNestedProto(101))
self.assertIn('Error parsing message', str(context.exception))

def testSucceedOversizeProto(self):
- api_implementation._c_module.SetAllowOversizeProtos(True)
+
+ if api_implementation.Type() == 'python':
+ decoder.SetRecursionLimit(310)
+ else:
+ api_implementation._c_module.SetAllowOversizeProtos(True)
+
msg = unittest_pb2.TestRecursiveMessage()
msg.ParseFromString(self.GenerateNestedProto(101))
+ decoder.SetRecursionLimit(decoder.DEFAULT_RECURSION_LIMIT)


if __name__ == '__main__':
diff --git a/python/google/protobuf/internal/self_recursive.proto b/python/google/protobuf/internal/self_recursive.proto
index 20bc2b4d3..d2a7f004b 100644
--- a/python/google/protobuf/internal/self_recursive.proto
+++ b/python/google/protobuf/internal/self_recursive.proto
@@ -12,6 +12,7 @@ package google.protobuf.python.internal;
message SelfRecursive {
SelfRecursive sub = 1;
int32 i = 2;
+ SelfRecursive sub_group = 3 [features.message_encoding = DELIMITED];
}

message IndirectRecursive {
--
2.51.1
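Note on the patch above: it caps nesting in the pure-Python decoder at DEFAULT_RECURSION_LIMIT (100) and adds decoder.SetRecursionLimit() for callers that need deeper messages, mirroring what its new tests exercise. A minimal sketch of that behavior, assuming the pure-Python implementation is active and the internal test proto self_recursive_pb2 is importable (it ships with protobuf's test suite, not with the installed wheel); decoder is a private module, so this illustrates the semantics rather than a supported API:

```python
from google.protobuf import message
from google.protobuf.internal import decoder
from google.protobuf.internal import self_recursive_pb2  # test-suite proto


def nested_payload(depth):
  # Serialize a SelfRecursive message nested `depth` levels deep.
  msg = self_recursive_pb2.SelfRecursive()
  sub = msg
  for _ in range(depth):
    sub = sub.sub
  sub.i = 1
  return msg.SerializeToString()


target = self_recursive_pb2.SelfRecursive()
target.ParseFromString(nested_payload(99))  # within the default limit of 100

try:
  target.ParseFromString(nested_payload(150))  # over the limit: now rejected
except message.DecodeError as err:
  print(err)  # 'Error parsing message: too many levels of nesting.'

# Callers that genuinely need deeper nesting can raise the internal limit,
# as the patched tests do, and restore the default afterwards.
decoder.SetRecursionLimit(160)
target.ParseFromString(nested_payload(150))
decoder.SetRecursionLimit(decoder.DEFAULT_RECURSION_LIMIT)
```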
CVE-2026-0994.patch (Normal file, 58 lines)
@@ -0,0 +1,58 @@
From b8ada4c2a07449fe8c4c4574292a501c1350c6e6 Mon Sep 17 00:00:00 2001
From: aviralgarg05 <gargaviral99@gmail.com>
Date: Fri, 9 Jan 2026 20:59:10 +0530
Subject: [PATCH] Fix Any recursion depth bypass in Python
json_format.ParseDict

This fixes a security vulnerability where nested google.protobuf.Any messages
could bypass the max_recursion_depth limit, potentially leading to denial of
service via stack overflow.

The root cause was that _ConvertAnyMessage() was calling itself recursively
via methodcaller() for nested well-known types, bypassing the recursion depth
tracking in ConvertMessage().

The fix routes well-known type parsing through ConvertMessage() to ensure
proper recursion depth accounting for all message types including nested Any.

Fixes #25070
---
python/google/protobuf/json_format.py | 15 +++++++++------
1 file changed, 9 insertions(+), 6 deletions(-)

diff --git a/python/google/protobuf/json_format.py b/python/google/protobuf/json_format.py
index 2a6bba939..9ace6345e 100644
--- a/python/google/protobuf/json_format.py
+++ b/python/google/protobuf/json_format.py
@@ -521,6 +521,10 @@ class _Parser(object):
Raises:
ParseError: In case of convert problems.
"""
+ # Increment recursion depth at message entry. The max_recursion_depth limit
+ # is exclusive: a depth value equal to max_recursion_depth will trigger an
+ # error. For example, with max_recursion_depth=5, nesting up to depth 4 is
+ # allowed, but attempting depth 5 raises ParseError.
self.recursion_depth += 1
if self.recursion_depth > self.max_recursion_depth:
raise ParseError(
@@ -725,12 +729,11 @@ class _Parser(object):
value['value'], sub_message, '{0}.value'.format(path)
)
elif full_name in _WKTJSONMETHODS:
- methodcaller(
- _WKTJSONMETHODS[full_name][1],
- value['value'],
- sub_message,
- '{0}.value'.format(path),
- )(self)
+ # For well-known types (including nested Any), use ConvertMessage
+ # to ensure recursion depth is properly tracked
+ self.ConvertMessage(
+ value['value'], sub_message, '{0}.value'.format(path)
+ )
else:
del value['@type']
self._ConvertFieldValuePair(value, sub_message, path)
--
2.52.0
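The commit message above pins the root cause to _ConvertAnyMessage() re-entering itself through methodcaller() for nested well-known types, so nested Any values were never counted against max_recursion_depth. A minimal sketch of the post-fix behavior, assuming json_format.ParseDict() accepts the max_recursion_depth keyword (default 100) as in current upstream releases; the payload shape and depth values here are illustrative:

```python
from google.protobuf import any_pb2
from google.protobuf import json_format


def nested_any(depth):
  # {"@type": ".../google.protobuf.Any", "value": {...}} nested `depth` times,
  # bottoming out at an empty JSON object (an empty Any).
  node = {}
  for _ in range(depth):
    node = {'@type': 'type.googleapis.com/google.protobuf.Any', 'value': node}
  return node


msg = any_pb2.Any()
json_format.ParseDict(nested_any(5), msg)  # shallow nesting still parses

# Before the fix, each nested 'value' re-entered _ConvertAnyMessage() without
# touching recursion_depth, so the only backstop was the interpreter's stack.
# After the fix, the nested parse goes through ConvertMessage() and is counted:
try:
  json_format.ParseDict(nested_any(150), msg, max_recursion_depth=100)
except json_format.ParseError as err:
  print(err)
```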
internal-pure-python-fixes.patch (Normal file, 421 lines)
@@ -0,0 +1,421 @@
From dac2e91e36408087d769be89a72fbafe1ea5039c Mon Sep 17 00:00:00 2001
From: Protobuf Team Bot <protobuf-github-bot@google.com>
Date: Tue, 4 Mar 2025 13:16:32 -0800
Subject: [PATCH 1/2] Internal pure python fixes

PiperOrigin-RevId: 733441339
---
python/google/protobuf/internal/decoder.py | 98 ++++++++++++++-----
.../google/protobuf/internal/message_test.py | 1 +
.../protobuf/internal/python_message.py | 7 +-
.../protobuf/internal/self_recursive.proto | 9 +-
4 files changed, 86 insertions(+), 29 deletions(-)

diff --git a/python/google/protobuf/internal/decoder.py b/python/google/protobuf/internal/decoder.py
index dcde1d942..89d829142 100755
--- a/python/google/protobuf/internal/decoder.py
+++ b/python/google/protobuf/internal/decoder.py
@@ -184,7 +184,10 @@ def _SimpleDecoder(wire_type, decode_value):
clear_if_default=False):
if is_packed:
local_DecodeVarint = _DecodeVarint
- def DecodePackedField(buffer, pos, end, message, field_dict):
+ def DecodePackedField(
+ buffer, pos, end, message, field_dict, current_depth=0
+ ):
+ del current_depth # unused
value = field_dict.get(key)
if value is None:
value = field_dict.setdefault(key, new_default(message))
@@ -199,11 +202,15 @@ def _SimpleDecoder(wire_type, decode_value):
del value[-1] # Discard corrupt value.
raise _DecodeError('Packed element was truncated.')
return pos
+
return DecodePackedField
elif is_repeated:
tag_bytes = encoder.TagBytes(field_number, wire_type)
tag_len = len(tag_bytes)
- def DecodeRepeatedField(buffer, pos, end, message, field_dict):
+ def DecodeRepeatedField(
+ buffer, pos, end, message, field_dict, current_depth=0
+ ):
+ del current_depth # unused
value = field_dict.get(key)
if value is None:
value = field_dict.setdefault(key, new_default(message))
@@ -218,9 +225,12 @@ def _SimpleDecoder(wire_type, decode_value):
if new_pos > end:
raise _DecodeError('Truncated message.')
return new_pos
+
return DecodeRepeatedField
else:
- def DecodeField(buffer, pos, end, message, field_dict):
+
+ def DecodeField(buffer, pos, end, message, field_dict, current_depth=0):
+ del current_depth # unused
(new_value, pos) = decode_value(buffer, pos)
if pos > end:
raise _DecodeError('Truncated message.')
@@ -229,6 +239,7 @@ def _SimpleDecoder(wire_type, decode_value):
else:
field_dict[key] = new_value
return pos
+
return DecodeField

return SpecificDecoder
@@ -364,7 +375,9 @@ def EnumDecoder(field_number, is_repeated, is_packed, key, new_default,
enum_type = key.enum_type
if is_packed:
local_DecodeVarint = _DecodeVarint
- def DecodePackedField(buffer, pos, end, message, field_dict):
+ def DecodePackedField(
+ buffer, pos, end, message, field_dict, current_depth=0
+ ):
"""Decode serialized packed enum to its value and a new position.

Args:
@@ -377,6 +390,7 @@ def EnumDecoder(field_number, is_repeated, is_packed, key, new_default,
Returns:
int, new position in serialized data.
"""
+ del current_depth # unused
value = field_dict.get(key)
if value is None:
value = field_dict.setdefault(key, new_default(message))
@@ -407,11 +421,14 @@ def EnumDecoder(field_number, is_repeated, is_packed, key, new_default,
# pylint: enable=protected-access
raise _DecodeError('Packed element was truncated.')
return pos
+
return DecodePackedField
elif is_repeated:
tag_bytes = encoder.TagBytes(field_number, wire_format.WIRETYPE_VARINT)
tag_len = len(tag_bytes)
- def DecodeRepeatedField(buffer, pos, end, message, field_dict):
+ def DecodeRepeatedField(
+ buffer, pos, end, message, field_dict, current_depth=0
+ ):
"""Decode serialized repeated enum to its value and a new position.

Args:
@@ -424,6 +441,7 @@ def EnumDecoder(field_number, is_repeated, is_packed, key, new_default,
Returns:
int, new position in serialized data.
"""
+ del current_depth # unused
value = field_dict.get(key)
if value is None:
value = field_dict.setdefault(key, new_default(message))
@@ -446,9 +464,11 @@ def EnumDecoder(field_number, is_repeated, is_packed, key, new_default,
if new_pos > end:
raise _DecodeError('Truncated message.')
return new_pos
+
return DecodeRepeatedField
else:
- def DecodeField(buffer, pos, end, message, field_dict):
+
+ def DecodeField(buffer, pos, end, message, field_dict, current_depth=0):
"""Decode serialized repeated enum to its value and a new position.

Args:
@@ -461,6 +481,7 @@ def EnumDecoder(field_number, is_repeated, is_packed, key, new_default,
Returns:
int, new position in serialized data.
"""
+ del current_depth # unused
value_start_pos = pos
(enum_value, pos) = _DecodeSignedVarint32(buffer, pos)
if pos > end:
@@ -480,6 +501,7 @@ def EnumDecoder(field_number, is_repeated, is_packed, key, new_default,
(tag_bytes, buffer[value_start_pos:pos].tobytes()))
# pylint: enable=protected-access
return pos
+
return DecodeField


@@ -538,7 +560,10 @@ def StringDecoder(field_number, is_repeated, is_packed, key, new_default,
tag_bytes = encoder.TagBytes(field_number,
wire_format.WIRETYPE_LENGTH_DELIMITED)
tag_len = len(tag_bytes)
- def DecodeRepeatedField(buffer, pos, end, message, field_dict):
+ def DecodeRepeatedField(
+ buffer, pos, end, message, field_dict, current_depth=0
+ ):
+ del current_depth # unused
value = field_dict.get(key)
if value is None:
value = field_dict.setdefault(key, new_default(message))
@@ -553,9 +578,12 @@ def StringDecoder(field_number, is_repeated, is_packed, key, new_default,
if buffer[new_pos:pos] != tag_bytes or new_pos == end:
# Prediction failed. Return.
return new_pos
+
return DecodeRepeatedField
else:
- def DecodeField(buffer, pos, end, message, field_dict):
+
+ def DecodeField(buffer, pos, end, message, field_dict, current_depth=0):
+ del current_depth # unused
(size, pos) = local_DecodeVarint(buffer, pos)
new_pos = pos + size
if new_pos > end:
@@ -565,6 +593,7 @@ def StringDecoder(field_number, is_repeated, is_packed, key, new_default,
else:
field_dict[key] = _ConvertToUnicode(buffer[pos:new_pos])
return new_pos
+
return DecodeField


@@ -579,7 +608,10 @@ def BytesDecoder(field_number, is_repeated, is_packed, key, new_default,
tag_bytes = encoder.TagBytes(field_number,
wire_format.WIRETYPE_LENGTH_DELIMITED)
tag_len = len(tag_bytes)
- def DecodeRepeatedField(buffer, pos, end, message, field_dict):
+ def DecodeRepeatedField(
+ buffer, pos, end, message, field_dict, current_depth=0
+ ):
+ del current_depth # unused
value = field_dict.get(key)
if value is None:
value = field_dict.setdefault(key, new_default(message))
@@ -594,9 +626,12 @@ def BytesDecoder(field_number, is_repeated, is_packed, key, new_default,
if buffer[new_pos:pos] != tag_bytes or new_pos == end:
# Prediction failed. Return.
return new_pos
+
return DecodeRepeatedField
else:
- def DecodeField(buffer, pos, end, message, field_dict):
+
+ def DecodeField(buffer, pos, end, message, field_dict, current_depth=0):
+ del current_depth # unused
(size, pos) = local_DecodeVarint(buffer, pos)
new_pos = pos + size
if new_pos > end:
@@ -606,6 +641,7 @@ def BytesDecoder(field_number, is_repeated, is_packed, key, new_default,
else:
field_dict[key] = buffer[pos:new_pos].tobytes()
return new_pos
+
return DecodeField


@@ -621,7 +657,9 @@ def GroupDecoder(field_number, is_repeated, is_packed, key, new_default):
tag_bytes = encoder.TagBytes(field_number,
wire_format.WIRETYPE_START_GROUP)
tag_len = len(tag_bytes)
- def DecodeRepeatedField(buffer, pos, end, message, field_dict):
+ def DecodeRepeatedField(
+ buffer, pos, end, message, field_dict, current_depth=0
+ ):
value = field_dict.get(key)
if value is None:
value = field_dict.setdefault(key, new_default(message))
@@ -630,7 +668,7 @@ def GroupDecoder(field_number, is_repeated, is_packed, key, new_default):
if value is None:
value = field_dict.setdefault(key, new_default(message))
# Read sub-message.
- pos = value.add()._InternalParse(buffer, pos, end)
+ pos = value.add()._InternalParse(buffer, pos, end, current_depth)
# Read end tag.
new_pos = pos+end_tag_len
if buffer[pos:new_pos] != end_tag_bytes or new_pos > end:
@@ -640,19 +678,22 @@ def GroupDecoder(field_number, is_repeated, is_packed, key, new_default):
if buffer[new_pos:pos] != tag_bytes or new_pos == end:
# Prediction failed. Return.
return new_pos
+
return DecodeRepeatedField
else:
- def DecodeField(buffer, pos, end, message, field_dict):
+
+ def DecodeField(buffer, pos, end, message, field_dict, current_depth=0):
value = field_dict.get(key)
if value is None:
value = field_dict.setdefault(key, new_default(message))
# Read sub-message.
- pos = value._InternalParse(buffer, pos, end)
+ pos = value._InternalParse(buffer, pos, end, current_depth)
# Read end tag.
new_pos = pos+end_tag_len
if buffer[pos:new_pos] != end_tag_bytes or new_pos > end:
raise _DecodeError('Missing group end tag.')
return new_pos
+
return DecodeField


@@ -666,7 +707,9 @@ def MessageDecoder(field_number, is_repeated, is_packed, key, new_default):
tag_bytes = encoder.TagBytes(field_number,
wire_format.WIRETYPE_LENGTH_DELIMITED)
tag_len = len(tag_bytes)
- def DecodeRepeatedField(buffer, pos, end, message, field_dict):
+ def DecodeRepeatedField(
+ buffer, pos, end, message, field_dict, current_depth=0
+ ):
value = field_dict.get(key)
if value is None:
value = field_dict.setdefault(key, new_default(message))
@@ -677,7 +720,10 @@ def MessageDecoder(field_number, is_repeated, is_packed, key, new_default):
if new_pos > end:
raise _DecodeError('Truncated message.')
# Read sub-message.
- if value.add()._InternalParse(buffer, pos, new_pos) != new_pos:
+ if (
+ value.add()._InternalParse(buffer, pos, new_pos, current_depth)
+ != new_pos
+ ):
# The only reason _InternalParse would return early is if it
# encountered an end-group tag.
raise _DecodeError('Unexpected end-group tag.')
@@ -686,9 +732,11 @@ def MessageDecoder(field_number, is_repeated, is_packed, key, new_default):
if buffer[new_pos:pos] != tag_bytes or new_pos == end:
# Prediction failed. Return.
return new_pos
+
return DecodeRepeatedField
else:
- def DecodeField(buffer, pos, end, message, field_dict):
+
+ def DecodeField(buffer, pos, end, message, field_dict, current_depth=0):
value = field_dict.get(key)
if value is None:
value = field_dict.setdefault(key, new_default(message))
@@ -698,11 +746,12 @@ def MessageDecoder(field_number, is_repeated, is_packed, key, new_default):
if new_pos > end:
raise _DecodeError('Truncated message.')
# Read sub-message.
- if value._InternalParse(buffer, pos, new_pos) != new_pos:
+ if value._InternalParse(buffer, pos, new_pos, current_depth) != new_pos:
# The only reason _InternalParse would return early is if it encountered
# an end-group tag.
raise _DecodeError('Unexpected end-group tag.')
return new_pos
+
return DecodeField


@@ -851,7 +900,8 @@ def MapDecoder(field_descriptor, new_default, is_message_map):
# Can't read _concrete_class yet; might not be initialized.
message_type = field_descriptor.message_type

- def DecodeMap(buffer, pos, end, message, field_dict):
+ def DecodeMap(buffer, pos, end, message, field_dict, current_depth=0):
+ del current_depth # Unused.
submsg = message_type._concrete_class()
value = field_dict.get(key)
if value is None:
@@ -934,7 +984,7 @@ def _SkipGroup(buffer, pos, end):
pos = new_pos


-def _DecodeUnknownFieldSet(buffer, pos, end_pos=None):
+def _DecodeUnknownFieldSet(buffer, pos, end_pos=None, current_depth=0):
"""Decode UnknownFieldSet. Returns the UnknownFieldSet and new position."""

unknown_field_set = containers.UnknownFieldSet()
@@ -944,14 +994,16 @@ def _DecodeUnknownFieldSet(buffer, pos, end_pos=None):
field_number, wire_type = wire_format.UnpackTag(tag)
if wire_type == wire_format.WIRETYPE_END_GROUP:
break
- (data, pos) = _DecodeUnknownField(buffer, pos, wire_type)
+ (data, pos) = _DecodeUnknownField(buffer, pos, wire_type, current_depth)
# pylint: disable=protected-access
unknown_field_set._add(field_number, wire_type, data)

return (unknown_field_set, pos)


-def _DecodeUnknownField(buffer, pos, wire_type):
+def _DecodeUnknownField(
+ buffer, pos, wire_type, current_depth=0
+):
"""Decode a unknown field. Returns the UnknownField and new position."""

if wire_type == wire_format.WIRETYPE_VARINT:
@@ -965,7 +1017,7 @@ def _DecodeUnknownField(buffer, pos, wire_type):
data = buffer[pos:pos+size].tobytes()
pos += size
elif wire_type == wire_format.WIRETYPE_START_GROUP:
- (data, pos) = _DecodeUnknownFieldSet(buffer, pos)
+ (data, pos) = _DecodeUnknownFieldSet(buffer, pos, None, current_depth)
elif wire_type == wire_format.WIRETYPE_END_GROUP:
return (0, -1)
else:
diff --git a/python/google/protobuf/internal/message_test.py b/python/google/protobuf/internal/message_test.py
index 2a723eabb..48e6df806 100755
--- a/python/google/protobuf/internal/message_test.py
+++ b/python/google/protobuf/internal/message_test.py
@@ -30,6 +30,7 @@ import warnings
cmp = lambda x, y: (x > y) - (x < y)

from google.protobuf.internal import api_implementation # pylint: disable=g-import-not-at-top
+from google.protobuf.internal import decoder
from google.protobuf.internal import encoder
from google.protobuf.internal import enum_type_wrapper
from google.protobuf.internal import more_extensions_pb2
diff --git a/python/google/protobuf/internal/python_message.py b/python/google/protobuf/internal/python_message.py
index fabc6aa07..62c059cd2 100755
--- a/python/google/protobuf/internal/python_message.py
+++ b/python/google/protobuf/internal/python_message.py
@@ -1194,7 +1194,7 @@ def _AddMergeFromStringMethod(message_descriptor, cls):
fields_by_tag = cls._fields_by_tag
message_set_decoders_by_tag = cls._message_set_decoders_by_tag

- def InternalParse(self, buffer, pos, end):
+ def InternalParse(self, buffer, pos, end, current_depth=0):
"""Create a message from serialized bytes.

Args:
@@ -1244,10 +1244,13 @@ def _AddMergeFromStringMethod(message_descriptor, cls):
else:
_MaybeAddDecoder(cls, field_des)
field_decoder = field_des._decoders[is_packed]
- pos = field_decoder(buffer, new_pos, end, self, field_dict)
+ pos = field_decoder(
+ buffer, new_pos, end, self, field_dict, current_depth
+ )
if field_des.containing_oneof:
self._UpdateOneofState(field_des)
return pos
+
cls._InternalParse = InternalParse


diff --git a/python/google/protobuf/internal/self_recursive.proto b/python/google/protobuf/internal/self_recursive.proto
index dbfcaf971..20bc2b4d3 100644
--- a/python/google/protobuf/internal/self_recursive.proto
+++ b/python/google/protobuf/internal/self_recursive.proto
@@ -5,18 +5,19 @@
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd

-syntax = "proto2";
+edition = "2023";

package google.protobuf.python.internal;

message SelfRecursive {
- optional SelfRecursive sub = 1;
+ SelfRecursive sub = 1;
+ int32 i = 2;
}

message IndirectRecursive {
- optional IntermediateRecursive intermediate = 1;
+ IntermediateRecursive intermediate = 1;
}

message IntermediateRecursive {
- optional IndirectRecursive indirect = 1;
+ IndirectRecursive indirect = 1;
}
--
2.51.1
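The backport above is mostly mechanical: every generated field decoder gains a trailing current_depth=0 parameter and _InternalParse() forwards it, so that the follow-up CVE-2025-4565 patch can enforce a limit inside the group and message decoders. A toy sketch of that pattern (not protobuf code; all names here are illustrative):

```python
class DecodeError(Exception):
  pass

RECURSION_LIMIT = 100


def decode_int(payload, out, current_depth=0):
  del current_depth  # unused, kept so every decoder shares one signature
  out['value'] = int(payload)


def decode_submessage(payload, out, current_depth=0):
  current_depth += 1
  if current_depth > RECURSION_LIMIT:
    raise DecodeError('Error parsing message: too many levels of nesting.')
  child = {}
  parse(payload, child, current_depth)  # pass the running depth downward
  out['child'] = child


DECODERS = {'int': decode_int, 'msg': decode_submessage}


def parse(fields, out, current_depth=0):
  # `fields` is a list of (kind, payload) pairs standing in for wire-format tags.
  for kind, payload in fields:
    DECODERS[kind](payload, out, current_depth)


# A shallow message parses fine; a few hundred nested levels would raise DecodeError.
root = {}
parse([('msg', [('msg', [('int', '1')])])], root)
```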
protobuf-fix-google-imports.patch (Normal file, 45 lines)
@@ -0,0 +1,45 @@
From 8351926380c7cc91aae6df5695c91426e209f958 Mon Sep 17 00:00:00 2001
From: Ge Yunxi <141423244+gyx47@users.noreply.github.com>
Date: Fri, 11 Jul 2025 11:04:58 -0700
Subject: [PATCH] drop-deprecated-pkg-resources-declare (#22442)

# Description
As of setuptools 81, pkg_resources.declare_namespace has been marked as deprecated (scheduled to be removed after 2025-11-30) so I remove it from init.py

# Environment:
a virtual machine of arch riscv64

# procedure
I got this problem when running a test that applied this package.
```
src/certbot_dns_google/_internal/tests/dns_google_test.py:9: in <module>
from google.auth import exceptions as googleauth_exceptions
/usr/lib/python3.13/site-packages/google/__init__.py:2: in <module>
__import__('pkg_resources').declare_namespace(__name__)
^^^^^^^^^^^^^^^^^^^^^^^^^^^
/usr/lib/python3.13/site-packages/pkg_resources/__init__.py:98: in <module>
warnings.warn(
E UserWarning: pkg_resources is deprecated as an API. See https://setuptools.pypa.io/en/latest/pkg_resources.html. The pkg_resources package is slated for removal as early as 2025-11-30. Refrain from using this package or pin to Setuptools<81.
```
[certbot-dns-google-4.1.1-1-riscv64-check.log](https://github.com/user-attachments/files/20976539/certbot-dns-google-4.1.1-1-riscv64-check.log)

Closes #22442

COPYBARA_INTEGRATE_REVIEW=https://github.com/protocolbuffers/protobuf/pull/22442 from gyx47:patch-1 6aef5c9df150cce444910d224fe90b2a514c7868
PiperOrigin-RevId: 782041935
---
python/google/__init__.py | 7 +++----
1 file changed, 3 insertions(+), 4 deletions(-)

diff --git a/python/google/__init__.py b/python/google/__init__.py
index 5585614122997..b36383a61027f 100644
--- a/python/google/__init__.py
+++ b/python/google/__init__.py
@@ -1,4 +1,3 @@
-try:
- __import__('pkg_resources').declare_namespace(__name__)
-except ImportError:
- __path__ = __import__('pkgutil').extend_path(__path__, __name__)
+from pkgutil import extend_path
+
+__path__ = extend_path(__path__, __name__)
@@ -1,3 +1,28 @@
-------------------------------------------------------------------
Tue Jan 27 08:29:42 UTC 2026 - John Paul Adrian Glaubitz <adrian.glaubitz@suse.com>

- Delete deprecated google/__init__.py namespace file

-------------------------------------------------------------------
Mon Jan 26 13:00:51 UTC 2026 - John Paul Adrian Glaubitz <adrian.glaubitz@suse.com>

- Add CVE-2026-0994.patch to fix google.protobuf.Any recursion depth
bypass in Python json_format.ParseDict (bsc#1257173, CVE-2026-0994)

-------------------------------------------------------------------
Mon Jan 5 12:14:24 UTC 2026 - John Paul Adrian Glaubitz <adrian.glaubitz@suse.com>

- Cherry-pick protobuf-fix-google-imports.patch to fix import issues of
reverse-dependency packages within the google namespace (bsc#1244918)

-------------------------------------------------------------------
Fri Nov 14 14:32:06 UTC 2025 - John Paul Adrian Glaubitz <adrian.glaubitz@suse.com>

- Add internal-pure-python-fixes.patch to backport changes required for CVE fix
- Add CVE-2025-4565.patch to fix parsing of untrusted Protocol Buffers
data containing an arbitrary number of recursive groups or messages
can lead to crash due to RecursionError (bsc#1244663, CVE-2025-4565)

-------------------------------------------------------------------
Mon Oct 28 08:20:17 UTC 2024 - Dirk Müller <dmueller@suse.com>


@@ -1,7 +1,7 @@
#
# spec file for package protobuf-java
#
# Copyright (c) 2024 SUSE LLC
# Copyright (c) 2026 SUSE LLC and contributors
# Copyright (c) 2024 Andreas Stieger <Andreas.Stieger@gmx.de>
#
# All modifications and additions to the file contributed by third parties

@@ -1,3 +1,28 @@
-------------------------------------------------------------------
Tue Jan 27 08:29:42 UTC 2026 - John Paul Adrian Glaubitz <adrian.glaubitz@suse.com>

- Delete deprecated google/__init__.py namespace file

-------------------------------------------------------------------
Mon Jan 26 13:00:51 UTC 2026 - John Paul Adrian Glaubitz <adrian.glaubitz@suse.com>

- Add CVE-2026-0994.patch to fix google.protobuf.Any recursion depth
bypass in Python json_format.ParseDict (bsc#1257173, CVE-2026-0994)

-------------------------------------------------------------------
Mon Jan 5 12:14:24 UTC 2026 - John Paul Adrian Glaubitz <adrian.glaubitz@suse.com>

- Cherry-pick protobuf-fix-google-imports.patch to fix import issues of
reverse-dependency packages within the google namespace (bsc#1244918)

-------------------------------------------------------------------
Fri Nov 14 14:32:06 UTC 2025 - John Paul Adrian Glaubitz <adrian.glaubitz@suse.com>

- Add internal-pure-python-fixes.patch to backport changes required for CVE fix
- Add CVE-2025-4565.patch to fix parsing of untrusted Protocol Buffers
data containing an arbitrary number of recursive groups or messages
can lead to crash due to RecursionError (bsc#1244663, CVE-2025-4565)

-------------------------------------------------------------------
Mon Oct 28 08:20:17 UTC 2024 - Dirk Müller <dmueller@suse.com>


@@ -1,7 +1,7 @@
#
# spec file for package protobuf
#
# Copyright (c) 2024 SUSE LLC
# Copyright (c) 2026 SUSE LLC and contributors
# Copyright (c) 2024 Andreas Stieger <Andreas.Stieger@gmx.de>
#
# All modifications and additions to the file contributed by third parties
@@ -75,6 +75,11 @@ URL: https://github.com/protocolbuffers/protobuf
Source0: https://github.com/protocolbuffers/protobuf/releases/download/v%{version}/%{tarname}-%{version}.tar.gz
Source1: baselibs.conf
Patch1: versionize-shlibs.patch
# PATCH-FIX-UPSTREAM - Backport changes from 29.x branch required to apply fix for CVE-2025-4565
Patch2: internal-pure-python-fixes.patch
# PATCH-FIX-UPSTREAM - Fix parsing of untrusted Protocol Buffers data containing an arbitrary
# number of recursive groups or messages can lead to crash due to RecursionError (CVE-2025-4565)
Patch3: CVE-2025-4565.patch
BuildRequires: cmake
BuildRequires: fdupes
BuildRequires: gcc%{?with_gcc}-c++

@@ -1,3 +1,28 @@
-------------------------------------------------------------------
Tue Jan 27 08:29:42 UTC 2026 - John Paul Adrian Glaubitz <adrian.glaubitz@suse.com>

- Delete deprecated google/__init__.py namespace file

-------------------------------------------------------------------
Mon Jan 26 13:00:51 UTC 2026 - John Paul Adrian Glaubitz <adrian.glaubitz@suse.com>

- Add CVE-2026-0994.patch to fix google.protobuf.Any recursion depth
bypass in Python json_format.ParseDict (bsc#1257173, CVE-2026-0994)

-------------------------------------------------------------------
Mon Jan 5 12:14:24 UTC 2026 - John Paul Adrian Glaubitz <adrian.glaubitz@suse.com>

- Cherry-pick protobuf-fix-google-imports.patch to fix import issues of
reverse-dependency packages within the google namespace (bsc#1244918)

-------------------------------------------------------------------
Fri Nov 14 14:32:06 UTC 2025 - John Paul Adrian Glaubitz <adrian.glaubitz@suse.com>

- Add internal-pure-python-fixes.patch to backport changes required for CVE fix
- Add CVE-2025-4565.patch to fix parsing of untrusted Protocol Buffers
data containing an arbitrary number of recursive groups or messages
can lead to crash due to RecursionError (bsc#1244663, CVE-2025-4565)

-------------------------------------------------------------------
Mon Oct 28 08:20:17 UTC 2024 - Dirk Müller <dmueller@suse.com>


@@ -1,7 +1,7 @@
#
# spec file for package python-protobuf
#
# Copyright (c) 2024 SUSE LLC
# Copyright (c) 2026 SUSE LLC and contributors
# Copyright (c) 2024 Andreas Stieger <Andreas.Stieger@gmx.de>
#
# All modifications and additions to the file contributed by third parties
@@ -27,6 +27,9 @@ License: BSD-3-Clause
Group: Development/Libraries/Python
URL: https://github.com/protocolbuffers/protobuf
Source0: https://files.pythonhosted.org/packages/source/p/protobuf/protobuf-%{version}.tar.gz
Patch0: https://github.com/protocolbuffers/protobuf/commit/8351926380c7cc91aae6df5695c91426e209f958.patch#/protobuf-fix-google-imports.patch
# PATCH-FIX-UPSTREAM - Fix google.protobuf.Any recursion depth bypass in Python json_format.ParseDict (CVE-2026-0994)
Patch1: CVE-2026-0994.patch
BuildRequires: %{python_module devel}
BuildRequires: %{python_module pip}
BuildRequires: %{python_module python-dateutil}
@@ -43,7 +46,8 @@ RPC protocols and file formats.
This package contains the Python bindings for Google Protocol Buffers.

%prep
%autosetup -p1 -n protobuf-%{version}
%autosetup -p2 -n protobuf-%{version}
rm -f google/__init__.py

# The previous blank line is crucial for older system being able
# to use the autosetup macro