diff --git a/python-2.5.2-CVE-2008-2315-int-overflows.patch b/python-2.5.2-CVE-2008-2315-int-overflows.patch new file mode 100644 index 0000000..506f5f4 --- /dev/null +++ b/python-2.5.2-CVE-2008-2315-int-overflows.patch @@ -0,0 +1,566 @@ +Index: Objects/unicodeobject.c +=================================================================== +--- Objects/unicodeobject.c (revision 65261) ++++ Objects/unicodeobject.c (working copy) +@@ -240,6 +240,11 @@ + return unicode_empty; + } + ++ /* Ensure we won't overflow the size. */ ++ if (length > ((PY_SSIZE_T_MAX / sizeof(Py_UNICODE)) - 1)) { ++ return (PyUnicodeObject *)PyErr_NoMemory(); ++ } ++ + /* Unicode freelist & memory allocation */ + if (unicode_freelist) { + unicode = unicode_freelist; +@@ -1095,6 +1100,9 @@ + char * out; + char * start; + ++ if (cbAllocated / 5 != size) ++ return PyErr_NoMemory(); ++ + if (size == 0) + return PyString_FromStringAndSize(NULL, 0); + +@@ -1693,8 +1701,9 @@ + { + PyObject *v; + unsigned char *p; ++ Py_ssize_t nsize, bytesize; + #ifdef Py_UNICODE_WIDE +- int i, pairs; ++ Py_ssize_t i, pairs; + #else + const int pairs = 0; + #endif +@@ -1717,8 +1726,15 @@ + if (s[i] >= 0x10000) + pairs++; + #endif +- v = PyString_FromStringAndSize(NULL, +- 2 * (size + pairs + (byteorder == 0))); ++ /* 2 * (size + pairs + (byteorder == 0)) */ ++ if (size > PY_SSIZE_T_MAX || ++ size > PY_SSIZE_T_MAX - pairs - (byteorder == 0)) ++ return PyErr_NoMemory(); ++ nsize = (size + pairs + (byteorder == 0)); ++ bytesize = nsize * 2; ++ if (bytesize / 2 != nsize) ++ return PyErr_NoMemory(); ++ v = PyString_FromStringAndSize(NULL, bytesize); + if (v == NULL) + return NULL; + +@@ -2046,6 +2062,11 @@ + char *p; + + static const char *hexdigit = "0123456789abcdef"; ++#ifdef Py_UNICODE_WIDE ++ const Py_ssize_t expandsize = 10; ++#else ++ const Py_ssize_t expandsize = 6; ++#endif + + /* Initial allocation is based on the longest-possible unichr + escape. +@@ -2061,13 +2082,12 @@ + escape. 
+ */ + ++ if (size > (PY_SSIZE_T_MAX - 2 - 1) / expandsize) ++ return PyErr_NoMemory(); ++ + repr = PyString_FromStringAndSize(NULL, + 2 +-#ifdef Py_UNICODE_WIDE +- + 10*size +-#else +- + 6*size +-#endif ++ + expandsize*size + + 1); + if (repr == NULL) + return NULL; +@@ -2320,12 +2340,16 @@ + char *q; + + static const char *hexdigit = "0123456789abcdef"; +- + #ifdef Py_UNICODE_WIDE +- repr = PyString_FromStringAndSize(NULL, 10 * size); ++ const Py_ssize_t expandsize = 10; + #else +- repr = PyString_FromStringAndSize(NULL, 6 * size); ++ const Py_ssize_t expandsize = 6; + #endif ++ ++ if (size > PY_SSIZE_T_MAX / expandsize) ++ return PyErr_NoMemory(); ++ ++ repr = PyString_FromStringAndSize(NULL, expandsize * size); + if (repr == NULL) + return NULL; + if (size == 0) +@@ -4761,6 +4785,11 @@ + return self; + } + ++ if (left > PY_SSIZE_T_MAX - self->length || ++ right > PY_SSIZE_T_MAX - (left + self->length)) { ++ PyErr_SetString(PyExc_OverflowError, "padded string is too long"); ++ return NULL; ++ } + u = _PyUnicode_New(left + self->length + right); + if (u) { + if (left) +Index: Objects/tupleobject.c +=================================================================== +--- Objects/tupleobject.c (revision 65261) ++++ Objects/tupleobject.c (working copy) +@@ -60,11 +60,12 @@ + Py_ssize_t nbytes = size * sizeof(PyObject *); + /* Check for overflow */ + if (nbytes / sizeof(PyObject *) != (size_t)size || +- (nbytes += sizeof(PyTupleObject) - sizeof(PyObject *)) +- <= 0) ++ (nbytes > PY_SSIZE_T_MAX - sizeof(PyTupleObject) - sizeof(PyObject *))) + { + return PyErr_NoMemory(); + } ++ nbytes += sizeof(PyTupleObject) - sizeof(PyObject *); ++ + op = PyObject_GC_NewVar(PyTupleObject, &PyTuple_Type, size); + if (op == NULL) + return NULL; +Index: Objects/bufferobject.c +=================================================================== +--- Objects/bufferobject.c (revision 65261) ++++ Objects/bufferobject.c (working copy) +@@ -427,6 +427,10 @@ + count = 0; + if (!get_buf(self, 
&ptr, &size, ANY_BUFFER)) + return NULL; ++ if (count > PY_SSIZE_T_MAX / size) { ++ PyErr_SetString(PyExc_MemoryError, "result too large"); ++ return NULL; ++ } + ob = PyString_FromStringAndSize(NULL, size * count); + if ( ob == NULL ) + return NULL; +Index: Objects/longobject.c +=================================================================== +--- Objects/longobject.c (revision 65261) ++++ Objects/longobject.c (working copy) +@@ -70,6 +70,8 @@ + PyErr_NoMemory(); + return NULL; + } ++ /* XXX(nnorwitz): This can overflow -- ++ PyObject_NEW_VAR / _PyObject_VAR_SIZE need to detect overflow */ + return PyObject_NEW_VAR(PyLongObject, &PyLong_Type, size); + } + +Index: Objects/stringobject.c +=================================================================== +--- Objects/stringobject.c (revision 65261) ++++ Objects/stringobject.c (working copy) +@@ -75,6 +75,11 @@ + return (PyObject *)op; + } + ++ if (size > PY_SSIZE_T_MAX - sizeof(PyStringObject)) { ++ PyErr_SetString(PyExc_OverflowError, "string is too large"); ++ return NULL; ++ } ++ + /* Inline PyObject_NewVar */ + op = (PyStringObject *)PyObject_MALLOC(sizeof(PyStringObject) + size); + if (op == NULL) +@@ -971,14 +976,24 @@ + Py_INCREF(a); + return (PyObject *)a; + } ++ /* Check that string sizes are not negative, to prevent an ++ overflow in cases where we are passed incorrectly-created ++ strings with negative lengths (due to a bug in other code). 
++ */ + size = a->ob_size + b->ob_size; +- if (size < 0) { ++ if (a->ob_size < 0 || b->ob_size < 0 || ++ a->ob_size > PY_SSIZE_T_MAX - b->ob_size) { + PyErr_SetString(PyExc_OverflowError, + "strings are too large to concat"); + return NULL; + } + + /* Inline PyObject_NewVar */ ++ if (size > PY_SSIZE_T_MAX - sizeof(PyStringObject)) { ++ PyErr_SetString(PyExc_OverflowError, ++ "strings are too large to concat"); ++ return NULL; ++ } + op = (PyStringObject *)PyObject_MALLOC(sizeof(PyStringObject) + size); + if (op == NULL) + return PyErr_NoMemory(); +Index: Lib/test/seq_tests.py +=================================================================== +--- Lib/test/seq_tests.py (revision 65261) ++++ Lib/test/seq_tests.py (working copy) +@@ -307,11 +307,13 @@ + self.assertEqual(id(s), id(s*1)) + + def test_bigrepeat(self): +- x = self.type2test([0]) +- x *= 2**16 +- self.assertRaises(MemoryError, x.__mul__, 2**16) +- if hasattr(x, '__imul__'): +- self.assertRaises(MemoryError, x.__imul__, 2**16) ++ import sys ++ if sys.maxint <= 2147483647: ++ x = self.type2test([0]) ++ x *= 2**16 ++ self.assertRaises(MemoryError, x.__mul__, 2**16) ++ if hasattr(x, '__imul__'): ++ self.assertRaises(MemoryError, x.__imul__, 2**16) + + def test_subscript(self): + a = self.type2test([10, 11]) +Index: Lib/test/test_strop.py +=================================================================== +--- Lib/test/test_strop.py (revision 65261) ++++ Lib/test/test_strop.py (working copy) +@@ -115,7 +115,26 @@ + strop.uppercase + strop.whitespace + ++ @test_support.precisionbigmemtest(size=test_support._2G - 1, memuse=5) ++ def test_stropjoin_huge_list(self, size): ++ a = "A" * size ++ try: ++ r = strop.join([a, a], a) ++ except OverflowError: ++ pass ++ else: ++ self.assertEquals(len(r), len(a) * 3) + ++ @test_support.precisionbigmemtest(size=test_support._2G - 1, memuse=1) ++ def test_stropjoin_huge_tup(self, size): ++ a = "A" * size ++ try: ++ r = strop.join((a, a), a) ++ except OverflowError: ++ pass 
# acceptable on 32-bit ++ else: ++ self.assertEquals(len(r), len(a) * 3) ++ + transtable = '\000\001\002\003\004\005\006\007\010\011\012\013\014\015\016\017\020\021\022\023\024\025\026\027\030\031\032\033\034\035\036\037 !"#$%&\'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`xyzdefghijklmnopqrstuvwxyz{|}~\177\200\201\202\203\204\205\206\207\210\211\212\213\214\215\216\217\220\221\222\223\224\225\226\227\230\231\232\233\234\235\236\237\240\241\242\243\244\245\246\247\250\251\252\253\254\255\256\257\260\261\262\263\264\265\266\267\270\271\272\273\274\275\276\277\300\301\302\303\304\305\306\307\310\311\312\313\314\315\316\317\320\321\322\323\324\325\326\327\330\331\332\333\334\335\336\337\340\341\342\343\344\345\346\347\350\351\352\353\354\355\356\357\360\361\362\363\364\365\366\367\370\371\372\373\374\375\376\377' + + +Index: Lib/test/test_bigmem.py +=================================================================== +--- Lib/test/test_bigmem.py (revision 65261) ++++ Lib/test/test_bigmem.py (working copy) +@@ -1,5 +1,5 @@ + from test import test_support +-from test.test_support import bigmemtest, _1G, _2G ++from test.test_support import bigmemtest, _1G, _2G, _4G, precisionbigmemtest + + import unittest + import operator +@@ -53,7 +53,23 @@ + lpadsize += 1 + self.assertEquals(s[lpadsize:-rpadsize], SUBSTR) + self.assertEquals(s.strip(), SUBSTR.strip()) +- ++ ++ @precisionbigmemtest(size=_2G - 1, memuse=1) ++ def test_center_unicode(self, size): ++ SUBSTR = u' abc def ghi' ++ try: ++ s = SUBSTR.center(size) ++ except OverflowError: ++ pass # acceptable on 32-bit ++ else: ++ self.assertEquals(len(s), size) ++ lpadsize = rpadsize = (len(s) - len(SUBSTR)) // 2 ++ if len(s) % 2: ++ lpadsize += 1 ++ self.assertEquals(s[lpadsize:-rpadsize], SUBSTR) ++ self.assertEquals(s.strip(), SUBSTR.strip()) ++ del s ++ + @bigmemtest(minsize=_2G, memuse=2) + def test_count(self, size): + SUBSTR = ' abc def ghi' +@@ -69,11 +85,45 @@ + def test_decode(self, size): + s = '.' 
* size + self.assertEquals(len(s.decode('utf-8')), size) ++ ++ def basic_encode_test(self, size, enc, c=u'.', expectedsize=None): ++ if expectedsize is None: ++ expectedsize = size ++ ++ s = c * size ++ self.assertEquals(len(s.encode(enc)), expectedsize) + + @bigmemtest(minsize=_2G + 2, memuse=3) + def test_encode(self, size): +- s = u'.' * size +- self.assertEquals(len(s.encode('utf-8')), size) ++ return self.basic_encode_test(size, 'utf-8') ++ ++ @precisionbigmemtest(size=_4G / 6 + 2, memuse=2) ++ def test_encode_raw_unicode_escape(self, size): ++ try: ++ return self.basic_encode_test(size, 'raw_unicode_escape') ++ except MemoryError: ++ pass # acceptable on 32-bit ++ ++ @precisionbigmemtest(size=_4G / 5 + 70, memuse=3) ++ def test_encode_utf7(self, size): ++ try: ++ return self.basic_encode_test(size, 'utf7') ++ except MemoryError: ++ pass # acceptable on 32-bit ++ ++ @precisionbigmemtest(size=_2G-1, memuse=2) ++ def test_decodeascii(self, size): ++ return self.basic_encode_test(size, 'ascii', c='A') ++ ++ @precisionbigmemtest(size=_4G / 5, memuse=6+2) ++ def test_unicode_repr_oflw(self, size): ++ try: ++ s = u"\uAAAA"*size ++ r = repr(s) ++ except MemoryError: ++ pass # acceptable on 32-bit ++ else: ++ self.failUnless(s == eval(r)) + + @bigmemtest(minsize=_2G, memuse=2) + def test_endswith(self, size): +@@ -458,7 +508,12 @@ + self.assertEquals(s[-1], "'") + self.assertEquals(s.count('\\'), size) + self.assertEquals(s.count('0'), size * 2) +- ++ ++ @bigmemtest(minsize=2**32 / 5, memuse=6+2) ++ def test_unicode_repr(self, size): ++ s = u"\uAAAA" * size ++ self.failUnless(len(repr(s)) > size) ++ + # This test is meaningful even with size < 2G, as long as the + # doubled string is > 2G (but it tests more if both are > 2G :) + @bigmemtest(minsize=_1G + 2, memuse=3) +@@ -642,6 +697,35 @@ + def test_repeat_large(self, size): + return self.basic_test_repeat(size) + ++ @bigmemtest(minsize=_1G - 1, memuse=12) ++ def test_repeat_large_2(self, size): ++ return 
self.basic_test_repeat(size) ++ ++ @precisionbigmemtest(size=_1G - 1, memuse=9) ++ def test_from_2G_generator(self, size): ++ try: ++ t = tuple(xrange(size)) ++ except MemoryError: ++ pass # acceptable on 32-bit ++ else: ++ count = 0 ++ for item in t: ++ self.assertEquals(item, count) ++ count += 1 ++ self.assertEquals(count, size) ++ ++ @precisionbigmemtest(size=_1G - 25, memuse=9) ++ def test_from_almost_2G_generator(self, size): ++ try: ++ t = tuple(xrange(size)) ++ count = 0 ++ for item in t: ++ self.assertEquals(item, count) ++ count += 1 ++ self.assertEquals(count, size) ++ except MemoryError: ++ pass # acceptable, expected on 32-bit ++ + # Like test_concat, split in two. + def basic_test_repr(self, size): + t = (0,) * size +@@ -957,9 +1041,35 @@ + self.assertEquals(l[:10], [1] * 10) + self.assertEquals(l[-10:], [5] * 10) + ++class BufferTest(unittest.TestCase): ++ ++ @precisionbigmemtest(size=_1G, memuse=4) ++ def test_repeat(self, size): ++ try: ++ b = buffer("AAAA")*size ++ except MemoryError: ++ pass # acceptable on 32-bit ++ else: ++ count = 0 ++ for c in b: ++ self.assertEquals(c, 'A') ++ count += 1 ++ self.assertEquals(count, size*4) ++ + def test_main(): +- test_support.run_unittest(StrTest, TupleTest, ListTest) ++ test_support.run_unittest(StrTest, TupleTest, ListTest, BufferTest) + ++# Expected failures (crashers) ++# del StrTest.test_center_unicode ++del StrTest.test_decodeascii ++# del StrTest.test_encode_utf32 ++# del StrTest.test_encode_utf7 ++# del StrTest.test_encode_raw_unicode_escape ++# ++# del TupleTest.test_from_2G_generator ++# ++# del BufferTest.test_repeat ++ + if __name__ == '__main__': + if len(sys.argv) > 1: + test_support.set_memlimit(sys.argv[1]) +Index: Lib/test/test_support.py +=================================================================== +--- Lib/test/test_support.py (revision 65261) ++++ Lib/test/test_support.py (working copy) +@@ -33,6 +33,7 @@ + use_resources = None # Flag set to [] by regrtest.py + max_memuse = 0 # 
Disable bigmem tests (they will still be run with + # small sizes, to make sure they work.) ++real_max_memuse = 0 + + # _original_stdout is meant to hold stdout at the time regrtest began. + # This may be "the real" stdout, or IDLE's emulation of stdout, or whatever. +@@ -323,6 +324,7 @@ + _1M = 1024*1024 + _1G = 1024 * _1M + _2G = 2 * _1G ++_4G = 4 * _1G + + # Hack to get at the maximum value an internal index can take. + class _Dummy: +@@ -333,6 +335,7 @@ + def set_memlimit(limit): + import re + global max_memuse ++ global real_max_memuse + sizes = { + 'k': 1024, + 'm': _1M, +@@ -344,6 +347,7 @@ + if m is None: + raise ValueError('Invalid memory limit %r' % (limit,)) + memlimit = int(float(m.group(1)) * sizes[m.group(3).lower()]) ++ real_max_memuse = memlimit + if memlimit > MAX_Py_ssize_t: + memlimit = MAX_Py_ssize_t + if memlimit < _2G - 1: +@@ -389,6 +393,27 @@ + return wrapper + return decorator + ++def precisionbigmemtest(size, memuse, overhead=5*_1M): ++ def decorator(f): ++ def wrapper(self): ++ if not real_max_memuse: ++ maxsize = 5147 ++ else: ++ maxsize = size ++ ++ if real_max_memuse and real_max_memuse < maxsize * memuse: ++ if verbose: ++ sys.stderr.write("Skipping %s because of memory " ++ "constraint\n" % (f.__name__,)) ++ return ++ ++ return f(self, maxsize) ++ wrapper.size = size ++ wrapper.memuse = memuse ++ wrapper.overhead = overhead ++ return wrapper ++ return decorator ++ + def bigaddrspacetest(f): + """Decorator for tests that fill the address space.""" + def wrapper(self): +Index: Modules/mmapmodule.c +=================================================================== +--- Modules/mmapmodule.c (revision 65261) ++++ Modules/mmapmodule.c (working copy) +@@ -223,7 +223,7 @@ + return(NULL); + + /* silently 'adjust' out-of-range requests */ +- if ((self->pos + num_bytes) > self->size) { ++ if (num_bytes > self->size - self->pos) { + num_bytes -= (self->pos+num_bytes) - self->size; + } + result = Py_BuildValue("s#", self->data+self->pos, 
num_bytes); +Index: Modules/stropmodule.c +=================================================================== +--- Modules/stropmodule.c (revision 65261) ++++ Modules/stropmodule.c (working copy) +@@ -216,6 +216,13 @@ + return NULL; + } + slen = PyString_GET_SIZE(item); ++ if (slen > PY_SSIZE_T_MAX - reslen || ++ seplen > PY_SSIZE_T_MAX - reslen - seplen) { ++ PyErr_SetString(PyExc_OverflowError, ++ "input too long"); ++ Py_DECREF(res); ++ return NULL; ++ } + while (reslen + slen + seplen >= sz) { + if (_PyString_Resize(&res, sz * 2) < 0) + return NULL; +@@ -253,6 +260,14 @@ + return NULL; + } + slen = PyString_GET_SIZE(item); ++ if (slen > PY_SSIZE_T_MAX - reslen || ++ seplen > PY_SSIZE_T_MAX - reslen - seplen) { ++ PyErr_SetString(PyExc_OverflowError, ++ "input too long"); ++ Py_DECREF(res); ++ Py_XDECREF(item); ++ return NULL; ++ } + while (reslen + slen + seplen >= sz) { + if (_PyString_Resize(&res, sz * 2) < 0) { + Py_DECREF(item); +Index: Modules/gcmodule.c +=================================================================== +--- Modules/gcmodule.c (revision 65261) ++++ Modules/gcmodule.c (working copy) +@@ -1318,7 +1318,10 @@ + _PyObject_GC_Malloc(size_t basicsize) + { + PyObject *op; +- PyGC_Head *g = (PyGC_Head *)PyObject_MALLOC( ++ PyGC_Head *g; ++ if (basicsize > PY_SSIZE_T_MAX - sizeof(PyGC_Head)) ++ return PyErr_NoMemory(); ++ g = (PyGC_Head *)PyObject_MALLOC( + sizeof(PyGC_Head) + basicsize); + if (g == NULL) + return PyErr_NoMemory(); +@@ -1361,6 +1364,8 @@ + { + const size_t basicsize = _PyObject_VAR_SIZE(op->ob_type, nitems); + PyGC_Head *g = AS_GC(op); ++ if (basicsize > PY_SSIZE_T_MAX - sizeof(PyGC_Head)) ++ return (PyVarObject *)PyErr_NoMemory(); + g = (PyGC_Head *)PyObject_REALLOC(g, sizeof(PyGC_Head) + basicsize); + if (g == NULL) + return (PyVarObject *)PyErr_NoMemory(); diff --git a/python-2.5.2-CVE-2008-2316-hashlib.patch b/python-2.5.2-CVE-2008-2316-hashlib.patch new file mode 100644 index 0000000..fba7768 --- /dev/null +++ 
b/python-2.5.2-CVE-2008-2316-hashlib.patch @@ -0,0 +1,151 @@ +Index: Lib/test/test_hashlib.py +================================================================================ +--- Lib/test/test_hashlib.py ++++ Lib/test/test_hashlib.py +@@ -9,7 +9,7 @@ + import hashlib + import unittest + from test import test_support +- ++from test.test_support import _4G, precisionbigmemtest + + def hexstr(s): + import string +@@ -55,7 +55,6 @@ + m2.update(aas + bees + cees) + self.assertEqual(m1.digest(), m2.digest()) + +- + def check(self, name, data, digest): + # test the direct constructors + computed = getattr(hashlib, name)(data).hexdigest() +@@ -74,7 +73,22 @@ + def test_case_md5_2(self): + self.check('md5', 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789', + 'd174ab98d277d9f5a5611c2c9f419d9f') +- ++ ++ @precisionbigmemtest(size=_4G + 5, memuse=1) ++ def test_case_md5_huge(self, size): ++ if size == _4G + 5: ++ try: ++ self.check('md5', 'A'*size, 'c9af2dff37468ce5dfee8f2cfc0a9c6d') ++ except OverflowError: ++ pass # 32-bit arch ++ ++ @precisionbigmemtest(size=_4G - 1, memuse=1) ++ def test_case_md5_uintmax(self, size): ++ if size == _4G - 1: ++ try: ++ self.check('md5', 'A'*size, '28138d306ff1b8281f1a9067e1a1a2b3') ++ except OverflowError: ++ pass # 32-bit arch + + # use the three examples from Federal Information Processing Standards + # Publication 180-1, Secure Hash Standard, 1995 April 17 +--- Modules/_hashopenssl.c ++++ Modules/_hashopenssl.c +@@ -19,6 +19,8 @@ + /* EVP is the preferred interface to hashing in OpenSSL */ + #include + ++#define MUNCH_SIZE INT_MAX ++ + + #ifndef HASH_OBJ_CONSTRUCTOR + #define HASH_OBJ_CONSTRUCTOR 0 +@@ -164,9 +166,18 @@ + if (!PyArg_ParseTuple(args, "s#:update", &cp, &len)) + return NULL; + ++ if (len > 0 && len <= MUNCH_SIZE) { + EVP_DigestUpdate(&self->ctx, cp, Py_SAFE_DOWNCAST(len, Py_ssize_t, + unsigned int)); +- ++ } else { ++ Py_ssize_t offset = 0; ++ while (len) { ++ unsigned int process = len > MUNCH_SIZE ? 
MUNCH_SIZE : len; ++ EVP_DigestUpdate(&self->ctx, cp + offset, process); ++ len -= process; ++ offset += process; ++ } ++ } + Py_INCREF(Py_None); + return Py_None; + } +@@ -255,10 +266,21 @@ + self->name = name_obj; + Py_INCREF(self->name); + +- if (cp && len) ++ if (cp && len) { ++ if (len > 0 && len <= MUNCH_SIZE) { + EVP_DigestUpdate(&self->ctx, cp, Py_SAFE_DOWNCAST(len, Py_ssize_t, + unsigned int)); +- ++ } else { ++ Py_ssize_t offset = 0; ++ while (len) { ++ unsigned int process = len > MUNCH_SIZE ? MUNCH_SIZE : len; ++ EVP_DigestUpdate(&self->ctx, cp + offset, process); ++ len -= process; ++ offset += process; ++ } ++ } ++ } ++ + return 0; + } + #endif +@@ -328,7 +350,7 @@ + static PyObject * + EVPnew(PyObject *name_obj, + const EVP_MD *digest, const EVP_MD_CTX *initial_ctx, +- const unsigned char *cp, unsigned int len) ++ const unsigned char *cp, Py_ssize_t len) + { + EVPobject *self; + +@@ -346,8 +368,20 @@ + EVP_DigestInit(&self->ctx, digest); + } + +- if (cp && len) +- EVP_DigestUpdate(&self->ctx, cp, len); ++ if (cp && len) { ++ if (len > 0 && len <= MUNCH_SIZE) { ++ EVP_DigestUpdate(&self->ctx, cp, Py_SAFE_DOWNCAST(len, Py_ssize_t, ++ unsigned int)); ++ } else { ++ Py_ssize_t offset = 0; ++ while (len) { ++ unsigned int process = len > MUNCH_SIZE ? 
MUNCH_SIZE : len; ++ EVP_DigestUpdate(&self->ctx, cp + offset, process); ++ len -= process; ++ offset += process; ++ } ++ } ++ } + + return (PyObject *)self; + } +@@ -384,8 +418,7 @@ + + digest = EVP_get_digestbyname(name); + +- return EVPnew(name_obj, digest, NULL, cp, Py_SAFE_DOWNCAST(len, Py_ssize_t, +- unsigned int)); ++ return EVPnew(name_obj, digest, NULL, cp, len); + } + + /* +@@ -410,7 +443,7 @@ + CONST_ ## NAME ## _name_obj, \ + NULL, \ + CONST_new_ ## NAME ## _ctx_p, \ +- cp, Py_SAFE_DOWNCAST(len, Py_ssize_t, unsigned int)); \ ++ cp, len); \ + } + + /* a PyMethodDef structure for the constructor */ diff --git a/python-2.5.2-CVE-2008-3142-pymem-resize.patch b/python-2.5.2-CVE-2008-3142-pymem-resize.patch new file mode 100644 index 0000000..23c3783 --- /dev/null +++ b/python-2.5.2-CVE-2008-3142-pymem-resize.patch @@ -0,0 +1,154 @@ +Index: Include/pymem.h +================================================================================ +--- Include/pymem.h ++++ Include/pymem.h +@@ -67,8 +67,12 @@ + for malloc(0), which would be treated as an error. Some platforms + would return a pointer with no memory behind it, which would break + pymalloc. To solve these problems, allocate an extra byte. */ +-#define PyMem_MALLOC(n) malloc((n) ? (n) : 1) +-#define PyMem_REALLOC(p, n) realloc((p), (n) ? (n) : 1) ++/* Returns NULL to indicate error if a negative size or size larger than ++ Py_ssize_t can represent is supplied. Helps prevents security holes. */ ++#define PyMem_MALLOC(n) (((n) < 0 || (n) > PY_SSIZE_T_MAX) ? NULL \ ++ : malloc((n) ? (n) : 1)) ++#define PyMem_REALLOC(p, n) (((n) < 0 || (n) > PY_SSIZE_T_MAX) ? NULL \ ++ : realloc((p), (n) ? (n) : 1)) + #define PyMem_FREE free + + #endif /* PYMALLOC_DEBUG */ +@@ -77,24 +81,31 @@ + * Type-oriented memory interface + * ============================== + * +- * These are carried along for historical reasons. 
There's rarely a good +- * reason to use them anymore (you can just as easily do the multiply and +- * cast yourself). ++ * Allocate memory for n objects of the given type. Returns a new pointer ++ * or NULL if the request was too large or memory allocation failed. Use ++ * these macros rather than doing the multiplication yourself so that proper ++ * overflow checking is always done. + */ + + #define PyMem_New(type, n) \ +- ( assert((n) <= PY_SIZE_MAX / sizeof(type)) , \ ++ ( ((n) > PY_SSIZE_T_MAX / sizeof(type)) ? NULL : \ + ( (type *) PyMem_Malloc((n) * sizeof(type)) ) ) + #define PyMem_NEW(type, n) \ +- ( assert((n) <= PY_SIZE_MAX / sizeof(type)) , \ ++ ( ((n) > PY_SSIZE_T_MAX / sizeof(type)) ? NULL : \ + ( (type *) PyMem_MALLOC((n) * sizeof(type)) ) ) + ++/* ++ * The value of (p) is always clobbered by this macro regardless of success. ++ * The caller MUST check if (p) is NULL afterwards and deal with the memory ++ * error if so. This means the original value of (p) MUST be saved for the ++ * caller's memory error handler to not lose track of it. ++ */ + #define PyMem_Resize(p, type, n) \ +- ( assert((n) <= PY_SIZE_MAX / sizeof(type)) , \ +- ( (p) = (type *) PyMem_Realloc((p), (n) * sizeof(type)) ) ) ++ ( (p) = ((n) > PY_SSIZE_T_MAX / sizeof(type)) ? NULL : \ ++ (type *) PyMem_Realloc((p), (n) * sizeof(type)) ) + #define PyMem_RESIZE(p, type, n) \ +- ( assert((n) <= PY_SIZE_MAX / sizeof(type)) , \ +- ( (p) = (type *) PyMem_REALLOC((p), (n) * sizeof(type)) ) ) ++ ( (p) = ((n) > PY_SSIZE_T_MAX / sizeof(type)) ? NULL : \ ++ (type *) PyMem_REALLOC((p), (n) * sizeof(type)) ) + + /* PyMem{Del,DEL} are left over from ancient days, and shouldn't be used + * anymore. They're just confusing aliases for PyMem_{Free,FREE} now. 
+--- Modules/almodule.c ++++ Modules/almodule.c +@@ -1633,9 +1633,11 @@ + if (nvals < 0) + goto cleanup; + if (nvals > setsize) { ++ ALvalue *old_return_set = return_set; + setsize = nvals; + PyMem_RESIZE(return_set, ALvalue, setsize); + if (return_set == NULL) { ++ return_set = old_return_set; + PyErr_NoMemory(); + goto cleanup; + } +--- Modules/arraymodule.c ++++ Modules/arraymodule.c +@@ -816,6 +816,7 @@ + array_do_extend(arrayobject *self, PyObject *bb) + { + Py_ssize_t size; ++ char *old_item; + + if (!array_Check(bb)) + return array_iter_extend(self, bb); +@@ -831,8 +832,10 @@ + return -1; + } + size = self->ob_size + b->ob_size; ++ old_item = self->ob_item; + PyMem_RESIZE(self->ob_item, char, size*self->ob_descr->itemsize); + if (self->ob_item == NULL) { ++ self->ob_item = old_item; + PyObject_Del(self); + PyErr_NoMemory(); + return -1; +@@ -886,7 +889,7 @@ + if (size > PY_SSIZE_T_MAX / n) { + return PyErr_NoMemory(); + } +- PyMem_Resize(items, char, n * size); ++ PyMem_RESIZE(items, char, n * size); + if (items == NULL) + return PyErr_NoMemory(); + p = items; +--- Modules/selectmodule.c ++++ Modules/selectmodule.c +@@ -349,10 +349,12 @@ + { + Py_ssize_t i, pos; + PyObject *key, *value; ++ struct pollfd *old_ufds = self->ufds; + + self->ufd_len = PyDict_Size(self->dict); +- PyMem_Resize(self->ufds, struct pollfd, self->ufd_len); ++ PyMem_RESIZE(self->ufds, struct pollfd, self->ufd_len); + if (self->ufds == NULL) { ++ self->ufds = old_ufds; + PyErr_NoMemory(); + return 0; + } +--- Objects/obmalloc.c ++++ Objects/obmalloc.c +@@ -727,6 +727,15 @@ + uint size; + + /* ++ * Limit ourselves to PY_SSIZE_T_MAX bytes to prevent security holes. ++ * Most python internals blindly use a signed Py_ssize_t to track ++ * things without checking for overflows or negatives. ++ * As size_t is unsigned, checking for nbytes < 0 is not required. ++ */ ++ if (nbytes > PY_SSIZE_T_MAX) ++ return NULL; ++ ++ /* + * This implicitly redirects malloc(0). 
+ */ + if ((nbytes - 1) < SMALL_REQUEST_THRESHOLD) { +@@ -1130,6 +1139,15 @@ + if (p == NULL) + return PyObject_Malloc(nbytes); + ++ /* ++ * Limit ourselves to PY_SSIZE_T_MAX bytes to prevent security holes. ++ * Most python internals blindly use a signed Py_ssize_t to track ++ * things without checking for overflows or negatives. ++ * As size_t is unsigned, checking for nbytes < 0 is not required. ++ */ ++ if (nbytes > PY_SSIZE_T_MAX) ++ return NULL; ++ + pool = POOL_ADDR(p); + if (Py_ADDRESS_IN_RANGE(p, pool)) { + /* We're in charge of this block */ diff --git a/python-2.5.2-CVE-2008-3144-snprintf-over-underflow.patch b/python-2.5.2-CVE-2008-3144-snprintf-over-underflow.patch new file mode 100644 index 0000000..150810a --- /dev/null +++ b/python-2.5.2-CVE-2008-3144-snprintf-over-underflow.patch @@ -0,0 +1,57 @@ +Index: Python/mysnprintf.c +=================================================================== +--- Python/mysnprintf.c (revision 63882) ++++ Python/mysnprintf.c (revision 63883) +@@ -54,18 +54,28 @@ + PyOS_vsnprintf(char *str, size_t size, const char *format, va_list va) + { + int len; /* # bytes written, excluding \0 */ +-#ifndef HAVE_SNPRINTF ++#ifdef HAVE_SNPRINTF ++#define _PyOS_vsnprintf_EXTRA_SPACE 1 ++#else ++#define _PyOS_vsnprintf_EXTRA_SPACE 512 + char *buffer; + #endif + assert(str != NULL); + assert(size > 0); + assert(format != NULL); ++ /* We take a size_t as input but return an int. Sanity check ++ * our input so that it won't cause an overflow in the ++ * vsnprintf return value or the buffer malloc size. */ ++ if (size > INT_MAX - _PyOS_vsnprintf_EXTRA_SPACE) { ++ len = -666; ++ goto Done; ++ } + + #ifdef HAVE_SNPRINTF + len = vsnprintf(str, size, format, va); + #else + /* Emulate it. 
*/ +- buffer = PyMem_MALLOC(size + 512); ++ buffer = PyMem_MALLOC(size + _PyOS_vsnprintf_EXTRA_SPACE); + if (buffer == NULL) { + len = -666; + goto Done; +@@ -75,7 +85,7 @@ + if (len < 0) + /* ignore the error */; + +- else if ((size_t)len >= size + 512) ++ else if ((size_t)len >= size + _PyOS_vsnprintf_EXTRA_SPACE) + Py_FatalError("Buffer overflow in PyOS_snprintf/PyOS_vsnprintf"); + + else { +@@ -86,8 +96,10 @@ + str[to_copy] = '\0'; + } + PyMem_FREE(buffer); ++#endif + Done: +-#endif +- str[size-1] = '\0'; ++ if (size > 0) ++ str[size-1] = '\0'; + return len; ++#undef _PyOS_vsnprintf_EXTRA_SPACE + } diff --git a/python-2.5.2-expandtabs.patch b/python-2.5.2-expandtabs.patch new file mode 100644 index 0000000..0ca5e4b --- /dev/null +++ b/python-2.5.2-expandtabs.patch @@ -0,0 +1,219 @@ +Index: Objects/unicodeobject.c +=================================================================== +--- Objects/unicodeobject.c (revision 61348) ++++ Objects/unicodeobject.c (revision 61349) +@@ -5689,7 +5689,8 @@ + Py_UNICODE *e; + Py_UNICODE *p; + Py_UNICODE *q; +- Py_ssize_t i, j, old_j; ++ Py_UNICODE *qe; ++ Py_ssize_t i, j, incr; + PyUnicodeObject *u; + int tabsize = 8; + +@@ -5697,63 +5698,70 @@ + return NULL; + + /* First pass: determine size of output string */ +- i = j = old_j = 0; +- e = self->str + self->length; ++ i = 0; /* chars up to and including most recent \n or \r */ ++ j = 0; /* chars since most recent \n or \r (use in tab calculations) */ ++ e = self->str + self->length; /* end of input */ + for (p = self->str; p < e; p++) + if (*p == '\t') { + if (tabsize > 0) { +- j += tabsize - (j % tabsize); +- if (old_j > j) { +- PyErr_SetString(PyExc_OverflowError, +- "new string is too long"); +- return NULL; +- } +- old_j = j; +- } ++ incr = tabsize - (j % tabsize); /* cannot overflow */ ++ if (j > PY_SSIZE_T_MAX - incr) ++ goto overflow1; ++ j += incr; ++ } + } + else { ++ if (j > PY_SSIZE_T_MAX - 1) ++ goto overflow1; + j++; + if (*p == '\n' || *p == '\r') { ++ if (i > 
PY_SSIZE_T_MAX - j) ++ goto overflow1; + i += j; +- old_j = j = 0; +- if (i < 0) { +- PyErr_SetString(PyExc_OverflowError, +- "new string is too long"); +- return NULL; +- } ++ j = 0; + } + } + +- if ((i + j) < 0) { +- PyErr_SetString(PyExc_OverflowError, "new string is too long"); +- return NULL; +- } ++ if (i > PY_SSIZE_T_MAX - j) ++ goto overflow1; + + /* Second pass: create output string and fill it */ + u = _PyUnicode_New(i + j); + if (!u) + return NULL; + +- j = 0; +- q = u->str; ++ j = 0; /* same as in first pass */ ++ q = u->str; /* next output char */ ++ qe = u->str + u->length; /* end of output */ + + for (p = self->str; p < e; p++) + if (*p == '\t') { + if (tabsize > 0) { + i = tabsize - (j % tabsize); + j += i; +- while (i--) ++ while (i--) { ++ if (q >= qe) ++ goto overflow2; + *q++ = ' '; ++ } + } + } + else { ++ if (q >= qe) ++ goto overflow2; ++ *q++ = *p; + j++; +- *q++ = *p; + if (*p == '\n' || *p == '\r') + j = 0; + } + + return (PyObject*) u; ++ ++ overflow2: ++ Py_DECREF(u); ++ overflow1: ++ PyErr_SetString(PyExc_OverflowError, "new string is too long"); ++ return NULL; + } + + PyDoc_STRVAR(find__doc__, +Index: Objects/stringobject.c +=================================================================== +--- Objects/stringobject.c (revision 61348) ++++ Objects/stringobject.c (revision 61349) +@@ -3299,9 +3299,9 @@ + static PyObject* + string_expandtabs(PyStringObject *self, PyObject *args) + { +- const char *e, *p; ++ const char *e, *p, *qe; + char *q; +- Py_ssize_t i, j, old_j; ++ Py_ssize_t i, j, incr; + PyObject *u; + int tabsize = 8; + +@@ -3309,63 +3309,70 @@ + return NULL; + + /* First pass: determine size of output string */ +- i = j = old_j = 0; +- e = PyString_AS_STRING(self) + PyString_GET_SIZE(self); ++ i = 0; /* chars up to and including most recent \n or \r */ ++ j = 0; /* chars since most recent \n or \r (use in tab calculations) */ ++ e = PyString_AS_STRING(self) + PyString_GET_SIZE(self); /* end of input */ + for (p = 
PyString_AS_STRING(self); p < e; p++) + if (*p == '\t') { + if (tabsize > 0) { +- j += tabsize - (j % tabsize); +- if (old_j > j) { +- PyErr_SetString(PyExc_OverflowError, +- "new string is too long"); +- return NULL; +- } +- old_j = j; ++ incr = tabsize - (j % tabsize); ++ if (j > PY_SSIZE_T_MAX - incr) ++ goto overflow1; ++ j += incr; + } + } + else { ++ if (j > PY_SSIZE_T_MAX - 1) ++ goto overflow1; + j++; + if (*p == '\n' || *p == '\r') { ++ if (i > PY_SSIZE_T_MAX - j) ++ goto overflow1; + i += j; +- old_j = j = 0; +- if (i < 0) { +- PyErr_SetString(PyExc_OverflowError, +- "new string is too long"); +- return NULL; +- } ++ j = 0; + } + } + +- if ((i + j) < 0) { +- PyErr_SetString(PyExc_OverflowError, "new string is too long"); +- return NULL; +- } ++ if (i > PY_SSIZE_T_MAX - j) ++ goto overflow1; + + /* Second pass: create output string and fill it */ + u = PyString_FromStringAndSize(NULL, i + j); + if (!u) + return NULL; + +- j = 0; +- q = PyString_AS_STRING(u); ++ j = 0; /* same as in first pass */ ++ q = PyString_AS_STRING(u); /* next output char */ ++ qe = PyString_AS_STRING(u) + PyString_GET_SIZE(u); /* end of output */ + + for (p = PyString_AS_STRING(self); p < e; p++) + if (*p == '\t') { + if (tabsize > 0) { + i = tabsize - (j % tabsize); + j += i; +- while (i--) ++ while (i--) { ++ if (q >= qe) ++ goto overflow2; + *q++ = ' '; ++ } + } + } + else { ++ if (q >= qe) ++ goto overflow2; ++ *q++ = *p; + j++; +- *q++ = *p; + if (*p == '\n' || *p == '\r') + j = 0; + } + + return u; ++ ++ overflow2: ++ Py_DECREF(u); ++ overflow1: ++ PyErr_SetString(PyExc_OverflowError, "new string is too long"); ++ return NULL; + } + + Py_LOCAL_INLINE(PyObject *) diff --git a/python-doc.spec b/python-doc.spec index 40f5143..98d3dad 100644 --- a/python-doc.spec +++ b/python-doc.spec @@ -2,9 +2,16 @@ # spec file for package python-doc (Version 2.5.1) # # Copyright (c) 2008 SUSE LINUX Products GmbH, Nuernberg, Germany. 
-# This file and all modifications and additions to the pristine
-# package are under the same license as the package itself.
 #
+# All modifications and additions to the file contributed by third parties
+# remain the property of their copyright owners, unless otherwise agreed
+# upon. The license for this file, and modifications and additions to the
+# file, is the same license as for the pristine package itself (unless the
+# license for the pristine package is not an Open Source License, in which
+# case the license is the MIT License). An "Open Source License" is a
+# license that conforms to the Open Source Definition (Version 1.9)
+# published by the Open Source Initiative.
+
 # Please submit bugfixes or comments via http://bugs.opensuse.org/
 #
@@ -17,7 +24,7 @@ Group: Development/Languages/Python
 BuildRoot: %{_tmppath}/%{name}-%{version}-build
 Summary: Additional Package Documentation for Python.
 Version: 2.5.1
-Release: 81
+Release: 91
 %define pyver 2.5.2
 BuildArch: noarch
 %define tarname Python-%{pyver}
diff --git a/python.changes b/python.changes
index 933910c..03bc1ae 100644
--- a/python.changes
+++ b/python.changes
@@ -1,3 +1,14 @@
+-------------------------------------------------------------------
+Wed Jul 30 20:35:02 CEST 2008 - matejcik@suse.cz
+
+- security fixes for issues mentioned in bnc#406051:
+  * CVE-2008-2315 - multiple integer overflows in basic types
+  * CVE-2008-2316 - partial hashing of huge data with hashlib
+  * CVE-2008-3142 - multiple buffer overflows in unicode processing
+  * CVE-2008-3144 - possible integer over/underflow in mysnprintf
+  * buffer overflows in expandtabs() method (afaik no CVE assigned)
+- also mentioned CVE-2008-3143 is already fixed in python 2.5.2
+
 -------------------------------------------------------------------
 Mon Jun 30 15:38:17 CEST 2008 - schwab@suse.de
 
diff --git a/python.spec b/python.spec
index a51c06f..deafed6 100644
--- a/python.spec
+++ b/python.spec
@@ -2,9 +2,16 @@
 # spec file for package python
(Version 2.5.2) # # Copyright (c) 2008 SUSE LINUX Products GmbH, Nuernberg, Germany. -# This file and all modifications and additions to the pristine -# package are under the same license as the package itself. # +# All modifications and additions to the file contributed by third parties +# remain the property of their copyright owners, unless otherwise agreed +# upon. The license for this file, and modifications and additions to the +# file, is the same license as for the pristine package itself (unless the +# license for the pristine package is not an Open Source License, in which +# case the license is the MIT License). An "Open Source License" is a +# license that conforms to the Open Source Definition (Version 1.9) +# published by the Open Source Initiative. + # Please submit bugfixes or comments via http://bugs.opensuse.org/ # @@ -20,7 +27,7 @@ AutoReqProv: on Obsoletes: python-nothreads python21 python-elementtree python-sqlite Summary: Python Interpreter Version: 2.5.2 -Release: 31 +Release: 43 %define tarname Python-%{version} Source0: %{tarname}.tar.bz2 Source6: README.SUSE @@ -44,6 +51,11 @@ Patch32: disable-dns-test.diff Patch33: python-2.5.2-from-string-and-size.patch Patch34: python-2.5.2-docdirs.patch Patch35: python-2.5.2-configure.patch +Patch36: python-2.5.2-expandtabs.patch +Patch37: python-2.5.2-CVE-2008-3142-pymem-resize.patch +Patch38: python-2.5.2-CVE-2008-2315-int-overflows.patch +Patch39: python-2.5.2-CVE-2008-2316-hashlib.patch +Patch40: python-2.5.2-CVE-2008-3144-snprintf-over-underflow.patch BuildRoot: %{_tmppath}/%{name}-%{version}-build %define python_version %(echo %{version} | head -c 3) %define idle_name idle @@ -218,6 +230,11 @@ Python2.x, it is part of the core Python distribution. %patch33 -p2 %patch34 %patch35 +%patch36 +%patch37 +%patch38 +%patch39 +%patch40 # some cleanup find . -name .cvsignore -type f -print0 | xargs -0 rm -f find . 
-name CVS -type d -print0 | xargs -0 rm -rf
@@ -491,6 +508,14 @@ rm -rf $RPM_BUILD_ROOT
 %{_bindir}/python2
 %changelog
+* Wed Jul 30 2008 matejcik@suse.cz
+- security fixes for issues mentioned in bnc#406051:
+  * CVE-2008-2315 - multiple integer overflows in basic types
+  * CVE-2008-2316 - partial hashing of huge data with hashlib
+  * CVE-2008-3142 - multiple buffer overflows in unicode processing
+  * CVE-2008-3144 - possible integer over/underflow in mysnprintf
+  * buffer overflows in expandtabs() method (afaik no CVE assigned)
+- also mentioned CVE-2008-3143 is already fixed in python 2.5.2
 * Mon Jun 30 2008 schwab@suse.de
 - Work around autoheader bug.
 * Fri Jun 13 2008 schwab@suse.de