diff --git a/denose-eventlet.patch b/denose-eventlet.patch
deleted file mode 100644
index 3a540d9..0000000
--- a/denose-eventlet.patch
+++ /dev/null
@@ -1,432 +0,0 @@
----
- eventlet.egg-info/SOURCES.txt | 3
- setup.py | 2
- tests/__init__.py | 3
- tests/dagpool_test.py | 149 ++++++++++++++++++++----------------------
- tests/greenio_test.py | 8 --
- tests/nosewrapper.py | 13 ---
- 6 files changed, 83 insertions(+), 95 deletions(-)
-
---- a/eventlet.egg-info/SOURCES.txt
-+++ b/eventlet.egg-info/SOURCES.txt
-@@ -175,7 +175,6 @@ tests/greenthread_test.py
- tests/hub_test.py
- tests/mock.py
- tests/mysqldb_test.py
--tests/nosewrapper.py
- tests/openssl_test.py
- tests/os_test.py
- tests/parse_results.py
-@@ -275,4 +274,4 @@ tests/stdlib/test_threading_local.py
- tests/stdlib/test_timeout.py
- tests/stdlib/test_urllib.py
- tests/stdlib/test_urllib2.py
--tests/stdlib/test_urllib2_localnet.py
-\ No newline at end of file
-+tests/stdlib/test_urllib2_localnet.py
---- a/setup.py
-+++ b/setup.py
-@@ -30,7 +30,7 @@ setuptools.setup(
- 'README.rst'
- )
- ).read(),
-- test_suite='nose.collector',
-+ test_suite='tests',
- classifiers=[
- "Development Status :: 4 - Beta",
- "Intended Audience :: Developers",
---- a/tests/__init__.py
-+++ b/tests/__init__.py
-@@ -20,7 +20,7 @@ import sys
- import unittest
- import warnings
-
--from nose.plugins.skip import SkipTest
-+from unittest import SkipTest
-
- import eventlet
- from eventlet import tpool
-@@ -223,7 +223,6 @@ class LimitedTestCase(unittest.TestCase)
- def check_idle_cpu_usage(duration, allowed_part):
- if resource is None:
- # TODO: use https://code.google.com/p/psutil/
-- from nose.plugins.skip import SkipTest
- raise SkipTest('CPU usage testing not supported (`import resource` failed)')
-
- r1 = resource.getrusage(resource.RUSAGE_SELF)
---- a/tests/dagpool_test.py
-+++ b/tests/dagpool_test.py
-@@ -5,7 +5,6 @@
- @brief Test DAGPool class
- """
-
--from nose.tools import *
- import eventlet
- from eventlet.dagpool import DAGPool, Collision, PropagateError
- import six
-@@ -13,8 +12,8 @@ from contextlib import contextmanager
- import itertools
-
-
--# Not all versions of nose.tools.assert_raises() support the usage in this
--# module, but it's straightforward enough to code that explicitly.
-+# Not all versions of assert_raises() support the usage in this module,
-+# but it's straightforward enough to code that explicitly.
- @contextmanager
- def assert_raises(exc):
- """exc is an exception class"""
-@@ -163,7 +162,7 @@ class Capture(object):
- # a set. Make a set containing its elements.
- setlist.append(set(subseq))
- # Now that we've massaged 'sequence' into 'setlist', compare.
-- assert_equal(self.sequence, setlist)
-+ assert self.sequence == setlist
-
-
- # ****************************************************************************
-@@ -191,14 +190,14 @@ def test_init():
- with check_no_suspend():
- results = pool.waitall()
- # with no spawn() or post(), waitall() returns preload data
-- assert_equals(results, dict(a=1, b=2, c=3))
-+ assert results == dict(a=1, b=2, c=3)
-
- # preload sequence of pairs
- pool = DAGPool([("d", 4), ("e", 5), ("f", 6)])
- # this must not hang
- with check_no_suspend():
- results = pool.waitall()
-- assert_equals(results, dict(d=4, e=5, f=6))
-+ assert results == dict(d=4, e=5, f=6)
-
-
- def test_wait_each_empty():
-@@ -216,10 +215,10 @@ def test_wait_each_preload():
- with check_no_suspend():
- # wait_each() may deliver in arbitrary order; collect into a dict
- # for comparison
-- assert_equals(dict(pool.wait_each("abc")), dict(a=1, b=2, c=3))
-+ assert dict(pool.wait_each("abc")) == dict(a=1, b=2, c=3)
-
- # while we're at it, test wait() for preloaded keys
-- assert_equals(pool.wait("bc"), dict(b=2, c=3))
-+ assert pool.wait("bc") == dict(b=2, c=3)
-
-
- def post_each(pool, capture):
-@@ -257,7 +256,7 @@ def test_wait_posted():
- eventlet.spawn(post_each, pool, capture)
- gotten = pool.wait("bcdefg")
- capture.add("got all")
-- assert_equals(gotten,
-+ assert (gotten ==
- dict(b=2, c=3,
- d="dval", e="eval",
- f="fval", g="gval"))
-@@ -285,7 +284,7 @@ def test_spawn_collision_spawn():
- pool = DAGPool()
- pool.spawn("a", (), lambda key, results: "aval")
- # hasn't yet even started
-- assert_equals(pool.get("a"), None)
-+ assert pool.get("a") == None
- with assert_raises(Collision):
- # Attempting to spawn again with same key should collide even if the
- # first spawned greenthread hasn't yet had a chance to run.
- # now let the spawned eventlet run
- eventlet.sleep(0)
- # should have finished
-- assert_equals(pool.get("a"), "aval")
-+ assert pool.get("a") == "aval"
- with assert_raises(Collision):
- # Attempting to spawn with same key collides even when the greenthread
- # has completed.
-@@ -324,60 +323,60 @@ def test_spawn_multiple():
- capture.step()
- # but none of them has yet produced a result
- for k in "defgh":
-- assert_equals(pool.get(k), None)
-- assert_equals(set(pool.keys()), set("abc"))
-- assert_equals(dict(pool.items()), dict(a=1, b=2, c=3))
-- assert_equals(pool.running(), 5)
-- assert_equals(set(pool.running_keys()), set("defgh"))
-- assert_equals(pool.waiting(), 1)
-- assert_equals(pool.waiting_for(), dict(h=set("defg")))
-- assert_equals(pool.waiting_for("d"), set())
-- assert_equals(pool.waiting_for("c"), set())
-+ assert pool.get(k) == None
-+ assert set(pool.keys()) == set("abc")
-+ assert dict(pool.items()) == dict(a=1, b=2, c=3)
-+ assert pool.running() == 5
-+ assert set(pool.running_keys()) == set("defgh")
-+ assert pool.waiting() == 1
-+ assert pool.waiting_for() == dict(h=set("defg"))
-+ assert pool.waiting_for("d") == set()
-+ assert pool.waiting_for("c") == set()
- with assert_raises(KeyError):
- pool.waiting_for("j")
-- assert_equals(pool.waiting_for("h"), set("defg"))
-+ assert pool.waiting_for("h") == set("defg")
-
- # let one of the upstream greenthreads complete
- events["f"].send("fval")
- spin()
- capture.step()
-- assert_equals(pool.get("f"), "fval")
-- assert_equals(set(pool.keys()), set("abcf"))
-- assert_equals(dict(pool.items()), dict(a=1, b=2, c=3, f="fval"))
-- assert_equals(pool.running(), 4)
-- assert_equals(set(pool.running_keys()), set("degh"))
-- assert_equals(pool.waiting(), 1)
-- assert_equals(pool.waiting_for("h"), set("deg"))
-+ assert pool.get("f") == "fval"
-+ assert set(pool.keys()) == set("abcf")
-+ assert dict(pool.items()) == dict(a=1, b=2, c=3, f="fval")
-+ assert pool.running() == 4
-+ assert set(pool.running_keys()) == set("degh")
-+ assert pool.waiting() == 1
-+ assert pool.waiting_for("h") == set("deg")
-
- # now two others
- events["e"].send("eval")
- events["g"].send("gval")
- spin()
- capture.step()
-- assert_equals(pool.get("e"), "eval")
-- assert_equals(pool.get("g"), "gval")
-- assert_equals(set(pool.keys()), set("abcefg"))
-- assert_equals(dict(pool.items()),
-+ assert pool.get("e") == "eval"
-+ assert pool.get("g") == "gval"
-+ assert set(pool.keys()) == set("abcefg")
-+ assert (dict(pool.items()) ==
- dict(a=1, b=2, c=3, e="eval", f="fval", g="gval"))
-- assert_equals(pool.running(), 2)
-- assert_equals(set(pool.running_keys()), set("dh"))
-- assert_equals(pool.waiting(), 1)
-- assert_equals(pool.waiting_for("h"), set("d"))
-+ assert pool.running() == 2
-+ assert set(pool.running_keys()) == set("dh")
-+ assert pool.waiting() == 1
-+ assert pool.waiting_for("h") == set("d")
-
- # last one
- events["d"].send("dval")
- # make sure both pool greenthreads get a chance to run
- spin()
- capture.step()
-- assert_equals(pool.get("d"), "dval")
-- assert_equals(set(pool.keys()), set("abcdefgh"))
-- assert_equals(dict(pool.items()),
-+ assert pool.get("d") == "dval"
-+ assert set(pool.keys()) == set("abcdefgh")
-+ assert (dict(pool.items()) ==
- dict(a=1, b=2, c=3,
- d="dval", e="eval", f="fval", g="gval", h="hval"))
-- assert_equals(pool.running(), 0)
-- assert_false(pool.running_keys())
-- assert_equals(pool.waiting(), 0)
-- assert_equals(pool.waiting_for("h"), set())
-+ assert pool.running() == 0
-+ assert not pool.running_keys()
-+ assert pool.waiting() == 0
-+ assert pool.waiting_for("h") == set()
-
- capture.validate([
- ["h got b", "h got c"],
-@@ -432,13 +431,13 @@ def test_spawn_many():
- spin()
- # verify that e completed (also that post(key) within greenthread
- # overrides implicit post of return value, which would be None)
-- assert_equals(pool.get("e"), "e")
-+ assert pool.get("e") == "e"
-
- # With the dependency graph shown above, it is not guaranteed whether b or
- # c will complete first. Handle either case.
- sequence = capture.sequence[:]
- sequence[1:3] = [set([sequence[1].pop(), sequence[2].pop()])]
-- assert_equals(sequence,
-+ assert (sequence ==
- [set(["a done"]),
- set(["b done", "c done"]),
- set(["d done"]),
-@@ -466,7 +465,7 @@ def test_wait_each_all():
- for pos in range(len(keys)):
- # next value from wait_each()
- k, v = next(each)
-- assert_equals(k, keys[pos])
-+ assert k == keys[pos]
- # advance every pool greenlet as far as it can go
- spin()
- # everything from keys[:pos+1] should have a value by now
-@@ -494,7 +493,7 @@ def test_kill():
- pool.kill("a")
- # didn't run
- spin()
-- assert_equals(pool.get("a"), None)
-+ assert pool.get("a") == None
- # killing it forgets about it
- with assert_raises(KeyError):
- pool.kill("a")
-@@ -505,7 +504,7 @@
- with assert_raises(KeyError):
- pool.kill("a")
- # verify it ran to completion
-- assert_equals(pool.get("a"), 2)
-+ assert pool.get("a") == 2
-
-
- def test_post_collision_preload():
-@@ -533,7 +532,7 @@ def test_post_collision_spawn():
- pool.kill("a")
- # now we can post
- pool.post("a", 3)
-- assert_equals(pool.get("a"), 3)
-+ assert pool.get("a") == 3
-
- pool = DAGPool()
- pool.spawn("a", (), lambda key, result: 4)
-@@ -553,10 +552,10 @@ def test_post_replace():
- pool = DAGPool()
- pool.post("a", 1)
- pool.post("a", 2, replace=True)
-- assert_equals(pool.get("a"), 2)
-- assert_equals(dict(pool.wait_each("a")), dict(a=2))
-- assert_equals(pool.wait("a"), dict(a=2))
-- assert_equals(pool["a"], 2)
-+ assert pool.get("a") == 2
-+ assert dict(pool.wait_each("a")) == dict(a=2)
-+ assert pool.wait("a") == dict(a=2)
-+ assert pool["a"] == 2
-
-
- def waitfor(capture, pool, key):
-@@ -598,14 +597,14 @@ def test_waitall_exc():
- try:
- pool.waitall()
- except PropagateError as err:
-- assert_equals(err.key, "a")
-+ assert err.key == "a"
- assert isinstance(err.exc, BogusError), \
- "exc attribute is {0}, not BogusError".format(err.exc)
-- assert_equals(str(err.exc), "bogus")
-+ assert str(err.exc) == "bogus"
- msg = str(err)
-- assert_in("PropagateError(a)", msg)
-- assert_in("BogusError", msg)
-- assert_in("bogus", msg)
-+ assert "PropagateError(a)" in msg
-+ assert "BogusError" in msg
-+ assert "bogus" in msg
-
-
- def test_propagate_exc():
-@@ -616,20 +615,20 @@
- try:
- pool["c"]
- except PropagateError as errc:
-- assert_equals(errc.key, "c")
-+ assert errc.key == "c"
- errb = errc.exc
-- assert_equals(errb.key, "b")
-+ assert errb.key == "b"
- erra = errb.exc
-- assert_equals(erra.key, "a")
-+ assert erra.key == "a"
- assert isinstance(erra.exc, BogusError), \
- "exc attribute is {0}, not BogusError".format(erra.exc)
-- assert_equals(str(erra.exc), "bogus")
-+ assert str(erra.exc) == "bogus"
- msg = str(errc)
-- assert_in("PropagateError(a)", msg)
-- assert_in("PropagateError(b)", msg)
-- assert_in("PropagateError(c)", msg)
-- assert_in("BogusError", msg)
-- assert_in("bogus", msg)
-+ assert "PropagateError(a)" in msg
-+ assert "PropagateError(b)" in msg
-+ assert "PropagateError(c)" in msg
-+ assert "BogusError" in msg
-+ assert "bogus" in msg
-
-
- def test_wait_each_exc():
-@@ -681,13 +680,13 @@ def test_post_get_exc():
- pass
-
- # wait_each_success() filters
-- assert_equals(dict(pool.wait_each_success()), dict(a=bogua))
-- assert_equals(dict(pool.wait_each_success("ab")), dict(a=bogua))
-- assert_equals(dict(pool.wait_each_success("a")), dict(a=bogua))
-- assert_equals(dict(pool.wait_each_success("b")), {})
-+ assert dict(pool.wait_each_success()) == dict(a=bogua)
-+ assert dict(pool.wait_each_success("ab")) == dict(a=bogua)
-+ assert dict(pool.wait_each_success("a")) == dict(a=bogua)
-+ assert dict(pool.wait_each_success("b")) == {}
-
- # wait_each_exception() filters the other way
-- assert_equals(dict(pool.wait_each_exception()), dict(b=bogub))
-- assert_equals(dict(pool.wait_each_exception("ab")), dict(b=bogub))
-- assert_equals(dict(pool.wait_each_exception("a")), {})
-- assert_equals(dict(pool.wait_each_exception("b")), dict(b=bogub))
-+ assert dict(pool.wait_each_exception()) == dict(b=bogub)
-+ assert dict(pool.wait_each_exception("ab")) == dict(b=bogub)
-+ assert dict(pool.wait_each_exception("a")) == {}
-+ assert dict(pool.wait_each_exception("b")) == dict(b=bogub)
---- a/tests/greenio_test.py
-+++ b/tests/greenio_test.py
-@@ -9,8 +9,6 @@ import socket as _orig_sock
- import sys
- import tempfile
-
--from nose.tools import eq_
--
- import eventlet
- from eventlet import event, greenio, debug
- from eventlet.hubs import get_hub
-@@ -39,7 +37,7 @@ def expect_socket_timeout(function, *arg
- raise AssertionError("socket.timeout not raised")
- except socket.timeout as e:
- assert hasattr(e, 'args')
-- eq_(e.args[0], 'timed out')
-+ assert e.args[0] == 'timed out'
-
-
- def min_buf_size():
-@@ -674,8 +672,8 @@ class TestGreenSocket(tests.LimitedTestC
- sender.sendto(b'second', 0, address)
-
- sender_address = ('127.0.0.1', sender.getsockname()[1])
-- eq_(receiver.recvfrom(1024), (b'first', sender_address))
-- eq_(receiver.recvfrom(1024), (b'second', sender_address))
-+ assert receiver.recvfrom(1024) == (b'first', sender_address)
-+ assert receiver.recvfrom(1024) == (b'second', sender_address)
-
-
- def test_get_fileno_of_a_socket_works():
---- a/tests/nosewrapper.py
-+++ b/tests/nosewrapper.py
-@@ -1,20 +1,13 @@
- """ This script simply gets the paths correct for testing eventlet with the
- hub extension for Nose."""
--import nose
- from os.path import dirname, realpath, abspath
- import sys
-+import unittest
-
-
- parent_dir = dirname(dirname(realpath(abspath(__file__))))
- if parent_dir not in sys.path:
- sys.path.insert(0, parent_dir)
-
--# hudson does a better job printing the test results if the exit value is 0
--zero_status = '--force-zero-status'
--if zero_status in sys.argv:
-- sys.argv.remove(zero_status)
-- launch = nose.run
--else:
-- launch = nose.main
--
--launch(argv=sys.argv)
-+if __name__ == '__main__':
-+ unittest.main()
diff --git a/eventlet-0.33.3.tar.gz b/eventlet-0.33.3.tar.gz
deleted file mode 100644
index 1dc1904..0000000
--- a/eventlet-0.33.3.tar.gz
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:722803e7eadff295347539da363d68ae155b8b26ae6a634474d0a920be73cfda
-size 416190
diff --git a/eventlet-0.34.3.tar.gz b/eventlet-0.34.3.tar.gz
new file mode 100644
index 0000000..c294610
--- /dev/null
+++ b/eventlet-0.34.3.tar.gz
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ed2d28a64414a001894b3baf5b650f2c9596b00d57f57d4d7a38f9d3d0c252e8
+size 538498
diff --git a/fix-py3-rlock.patch b/fix-py3-rlock.patch
deleted file mode 100644
index 706b5d3..0000000
--- a/fix-py3-rlock.patch
+++ /dev/null
@@ -1,118 +0,0 @@
----
- eventlet/patcher.py | 42 +++++++++++++----
- tests/isolated/patcher_existing_logging_module_lock.py | 30 ++++++++++++++++++++++++++++++
- tests/patcher_test.py | 4 +
- 3 files changed, 68 insertions(+), 8 deletions(-)
-
---- a/eventlet/patcher.py
-+++ b/eventlet/patcher.py
-@@ -412,6 +412,23 @@ def _green_existing_locks():
- elif py3_style and not isinstance(obj, pyrlock_type):
- _fix_py3_rlock(obj)
-
-+ if (3, 0) <= sys.version_info <= (3, 10):
-+ # Older py3 won't have RLocks show up in gc.get_objects() -- see
-+ # https://github.com/eventlet/eventlet/issues/546 -- so green a handful
-+ # that we know are significant
-+ import logging
-+ if isinstance(logging._lock, rlock_type):
-+ _fix_py3_rlock(logging._lock)
-+ logging._acquireLock()
-+ try:
-+ for ref in logging._handlerList:
-+ handler = ref()
-+ if handler and isinstance(handler.lock, rlock_type):
-+ _fix_py3_rlock(handler.lock)
-+ del handler
-+ finally:
-+ logging._releaseLock()
-+
-
- def _fix_py2_rlock(rlock, tid):
- import eventlet.green.threading
-@@ -425,7 +442,7 @@ def _fix_py2_rlock(rlock, tid):
-
- def _fix_py3_rlock(old):
- import gc
-- import threading
-+ from eventlet.green import threading
- new = threading._PyRLock()
- while old._is_owned():
- old.release()
-@@ -434,14 +451,23 @@ def _fix_py3_rlock(old):
- new.acquire()
- gc.collect()
- for ref in gc.get_referrers(old):
-- try:
-- ref_vars = vars(ref)
-- except TypeError:
-- pass
-+ if isinstance(ref, dict):
-+ for k, v in ref.items():
-+ if v is old:
-+ ref[k] = new
-+ elif isinstance(ref, list):
-+ for k, v in enumerate(ref):
-+ if v is old:
-+ ref[k] = new
- else:
-- for k, v in ref_vars.items():
-- if v == old:
-- setattr(ref, k, new)
-+ try:
-+ ref_vars = vars(ref)
-+ except TypeError:
-+ pass
-+ else:
-+ for k, v in ref_vars.items():
-+ if v is old:
-+ setattr(ref, k, new)
-
-
- def _green_os_modules():
---- /dev/null
-+++ b/tests/isolated/patcher_existing_logging_module_lock.py
-@@ -0,0 +1,30 @@
-+# https://github.com/eventlet/eventlet/issues/730
-+# https://github.com/eventlet/eventlet/pull/754
-+__test__ = False
-+
-+
-+if __name__ == "__main__":
-+ import logging
-+ import eventlet.patcher
-+ eventlet.patcher.monkey_patch(thread=True)
-+ import threading
-+
-+ def take_and_release():
-+ try:
-+ logging._lock.acquire()
-+ finally:
-+ logging._lock.release()
-+
-+ assert logging._lock.acquire()
-+ t = threading.Thread(target=take_and_release)
-+ t.daemon = True
-+ t.start()
-+
-+ t.join(timeout=0.1)
-+ # we should timeout, and the thread is still blocked waiting on the lock
-+ assert t.is_alive()
-+
-+ logging._lock.release()
-+ t.join(timeout=0.1)
-+ assert not t.is_alive()
-+ print("pass")
---- a/tests/patcher_test.py
-+++ b/tests/patcher_test.py
-@@ -485,6 +485,10 @@ def test_patcher_existing_locks_unlocked
- tests.run_isolated('patcher_existing_locks_unlocked.py')
-
-
-+def test_patcher_existing_logging_module_lock():
-+ tests.run_isolated('patcher_existing_logging_module_lock.py')
-+
-+
- def test_importlib_lock():
- tests.run_isolated('patcher_importlib_lock.py')
-
diff --git a/python-eventlet-FTBFS2028.patch b/python-eventlet-FTBFS2028.patch
deleted file mode 100644
index e3efc80..0000000
--- a/python-eventlet-FTBFS2028.patch
+++ /dev/null
@@ -1,48 +0,0 @@
-https://github.com/eventlet/eventlet/pull/643
-
-From df6b965c1b03a688c643dc7f5845cb88287027d1 Mon Sep 17 00:00:00 2001
-From: "Bernhard M. Wiedemann"
-Date: Fri, 28 Aug 2020 20:24:42 +0200
-Subject: [PATCH] Extend test cert to 2049
-
-This change makes tests pass after 2028
-Background:
-As part of my work on reproducible builds for openSUSE, I check that software still gives identical build results in the future.
-The usual offset is +15 years, because that is how long I expect some software will be used in some places.
-This showed up failing tests in our package build.
-
-See https://reproducible-builds.org/ for why this matters.
----
- tests/test_server.crt | 16 ++++++++--------
- 1 file changed, 8 insertions(+), 8 deletions(-)
-
---- a/tests/test_server.crt
-+++ b/tests/test_server.crt
-@@ -2,7 +2,7 @@
- MIIDwjCCAqqgAwIBAgIJAN19NW1oDKKtMA0GCSqGSIb3DQEBCwUAMHYxCzAJBgNV
- BAYTAlRTMQ0wCwYDVQQIDARUZXN0MQ0wCwYDVQQHDARUZXN0MRYwFAYDVQQKDA1U
- ZXN0IEV2ZW50bGV0MQ0wCwYDVQQLDARUZXN0MQ0wCwYDVQQDDARUZXN0MRMwEQYJ
--KoZIhvcNAQkBFgRUZXN0MB4XDTE4MDgyMjEzNDIxMVoXDTI4MDgxOTEzNDIxMVow
-+KoZIhvcNAQkBFgRUZXN0MB4XDTIwMDgyODEzMTUxNloXDTQ5MTIzMTEzMTUxN1ow
- djELMAkGA1UEBhMCVFMxDTALBgNVBAgMBFRlc3QxDTALBgNVBAcMBFRlc3QxFjAU
- BgNVBAoMDVRlc3QgRXZlbnRsZXQxDTALBgNVBAsMBFRlc3QxDTALBgNVBAMMBFRl
- c3QxEzARBgkqhkiG9w0BCQEWBFRlc3QwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAw
-@@ -13,11 +13,11 @@ Lvc6cJHMKaxHCeIBOL+z/9kJqhh30eqsmNB5AXSo
- 3+GNgzZJ3KGape7pcBYER7zg/yZLZxgNFlTCOZiysjNxC0liJA9tgUQhRc1gsqA8
- dQxzvqW8kuZedmatjyM58WixvjymobC3AgMBAAGjUzBRMB0GA1UdDgQWBBQT3V3f
- 8vCoqGXe6zySSjVP+J/P7zAfBgNVHSMEGDAWgBQT3V3f8vCoqGXe6zySSjVP+J/P
--7zAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3DQEBCwUAA4IBAQAws7zXsftw5s/P
--dnyFAf8q0WoVtWi9ljshWfJvTFMfuCqdSoNT+kIeQq071/RFW9YLqDZGXI4fyfzW
--50A7xFm+Syy7wPOmfLJdPx5HRJ5jgIDlij9vL45W3mXEohkkzMCdjwXfOIQPOEEx
--ZQHF57RaHlKEGexc/yvOLlOgKP23BOgB7pZjCC9divyDJ3ETlzgE+UTymHxmFM0i
--TCAM9dGEl1QPr7zA08rNgVae+/uQksdM55QmQFkTAXisFPcxNgHSKOSHsDiUJvWG
--7bJrwO6+T2wjRxWRD7anQV3DqBG1WteXA/dfYqjUi0QPqreWqNb+3OM60UwPJsvl
--ZDfUrsbY
-+7zAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3DQEBCwUAA4IBAQAowDu7bu/6DuLH
-+yNv8Q27vhsHS2IbguGSTlpSObUqvIF1fv2UzjSl1jjmkN7IQqEjV7ql7NpmVGv5u
-+4s5uKGID9q0Eq1wNKpqps16ABOb6I415j3NMq7r9bSNrlgPHrSYnySSyN2JyiXyR
-+Q9wxY6YBQMHchFytYui9/A3WwmzfZkzZpN2AWlC/emiDlfbWT9bFO6tgImrD3BIi
-+PJoTsc5SBmevUvOC6LPPIKq3/SdywgAi3AGKcyMlLhEjFX5lRA3GK3pDudRqKY2+
-+3n6WcOut0RytatsNYqIMVakIGC4ZCLi69xSLlRVVvxnfGgJxw+mHLtlQxDz2GoQ+
-+XAW8Yf8H
- -----END CERTIFICATE-----
diff --git a/python-eventlet.changes b/python-eventlet.changes
index 5be9827..949d26f 100644
--- a/python-eventlet.changes
+++ b/python-eventlet.changes
@@ -1,3 +1,39 @@
+-------------------------------------------------------------------
+Fri Jan 19 08:47:40 UTC 2024 - Dirk Müller
+
+- update to 0.34.3:
+ * Fix security issue in the wsgi module related to RFC 9112
+ * Fix segfault, a new approach for greening existing locks
+ * greendns: fix getaddrinfo parameter name
+ * Fix deprecation warning on ssl.PROTOCOL_TLS
+ * Pytests, fix error at teardown of
+ TestGreenSocket.test_full_duplex
+ * Skip test which uses Py cgi module
+ * Drop old code based on python < 3.7
+ * Allowing inheritance of GreenSSLSocket without overriding the
+ __new__ method https://github.com/eventlet/eventlet/pull/796
+ * [bug] Fix broken API related to `__version__` removal
+ * [doc] Fix pypi broken link
+ * 0.34.1
+ * [bug] Fix memory leak in greendns
+ * [infra] Fix OIDC authentication failure
+ * [bug] Ignore asyncore and asynchat for Python 3.12+
+ * 0.34.0 (Not released on Pypi)
+ * Dropped support for Python 3.6 and earlier.
+ * Fix Python 3.13 compat by adding missing attribute
+ '_is_main_interpreter'
+ * Add support of Python 3.12
+ * Drop unmaintained and unused stdlib tests
+ * Fix tests and CI for Python 3.7 and higher
+ * Stop claiming to create universal wheels
+ * Fix green logging locks for Python versions <= 3.10
+- switch to PEP517 build
+- cleanup tests
+- drop support-python3.12.patch, denose-eventlet.patch:
+ solved differently upstream
+- drop python-eventlet-FTBFS2028.patch fix-py3-rlock.patch:
+ upstream
+
-------------------------------------------------------------------
Thu Nov 30 04:41:02 UTC 2023 - Steve Kowalik
@@ -44,7 +80,7 @@ Sat Jan 28 12:30:48 UTC 2023 - Dirk Müller
Tue Jan 17 16:55:46 UTC 2023 - Daniel Garcia
- Add fix-py3-rlock.patch to make the code compatible with python 3.11,
- gh#eventlet/eventlet#754
-------------------------------------------------------------------
Fri Dec 9 10:16:59 UTC 2022 - Thorsten Kukuk
-------------------------------------------------------------------
Sat Dec 3 05:14:32 UTC 2022 - Yogalakshmi Arunachalam
-- Update to v0.33.2
+- Update to v0.33.2
* Stop using deprecated threading APIs
Way back in py26, snake_case alternatives were added for the old
camelCase APIs. py310 started emitting DeprecationWarnings about them;
@@ -138,18 +174,18 @@ Thu Dec 10 22:43:44 UTC 2020 - Benjamin Greiner
Mon Dec 7 00:14:23 UTC 2020 - Benjamin Greiner
- Update to 0.29.1
- * patcher: [py27] recursion error in pytest/python2.7 installing
+ * patcher: [py27] recursion error in pytest/python2.7 installing
register_at_fork
- * patcher: monkey_patch(builtins=True) failed on py3 because
+ * patcher: monkey_patch(builtins=True) failed on py3 because
`file` class is gone
- * don't crash on PyPy 7.0.0
- * Only install monotonic on python2
+ * don't crash on PyPy 7.0.0
+ * Only install monotonic on python2
- Changes for 0.29.0
* ssl: context wrapped listener fails accept()
- Changes for 0.28.1
* Clean up TypeError in __del__
- Changes for 0.28.0
- * Always remove the right listener from the hub
+ * Always remove the right listener from the hub
gh#enventlet/eventlet#645
- Changes for 0.27.0
* patcher: Clean up threading book-keeping at fork when
diff --git a/python-eventlet.spec b/python-eventlet.spec
index ae7055c..4f9c3d1 100644
--- a/python-eventlet.spec
+++ b/python-eventlet.spec
@@ -1,7 +1,7 @@
#
# spec file for package python-eventlet
#
-# Copyright (c) 2023 SUSE LLC
+# Copyright (c) 2024 SUSE LLC
#
# All modifications and additions to the file contributed by third parties
# remain the property of their copyright owners, unless otherwise agreed
@@ -16,38 +16,28 @@
#
-%define skip_python2 1
%{?sle15_python_module_pythons}
Name: python-eventlet
-Version: 0.33.3
+Version: 0.34.3
Release: 0
Summary: Concurrent networking library for Python
License: MIT
Group: Development/Languages/Python
URL: https://eventlet.net
Source: https://files.pythonhosted.org/packages/source/e/eventlet/eventlet-%{version}.tar.gz
-# PATCH-FEATURE-UPSTREAM remove_nose.patch gh#eventlet/eventlet#638 mcepl@suse.com
-# Removes dependency on nose
-Patch0: denose-eventlet.patch
-# PATCH-FIX-UPSTREAM https://github.com/eventlet/eventlet/pull/643
-Patch2: python-eventlet-FTBFS2028.patch
-# PATCH-FIX-UPSTREAM fix-py3-rlock.patch gh#eventlet/eventlet#754
-Patch3: fix-py3-rlock.patch
-# PATCH-FIX-OPENSUSE Based on https://src.fedoraproject.org/rpms/python-eventlet/raw/rawhide/f/python3.12.patch
-Patch4: support-python3.12.patch
-BuildRequires: %{python_module setuptools}
+BuildRequires: %{python_module hatch-vcs}
+BuildRequires: %{python_module pip}
+BuildRequires: %{python_module wheel}
BuildRequires: fdupes
BuildRequires: python-rpm-macros
Requires: netcfg
Requires: python-dnspython >= 1.15.0
-Requires: python-greenlet >= 0.3
-Requires: python-six >= 1.10.0
+Requires: python-greenlet >= 1.0
BuildArch: noarch
# SECTION TEST requirements
BuildRequires: %{python_module dnspython >= 1.15.0}
-BuildRequires: %{python_module greenlet >= 0.3}
+BuildRequires: %{python_module greenlet >= 1.0}
BuildRequires: %{python_module pytest}
-BuildRequires: %{python_module six >= 1.10.0}
BuildRequires: %{python_module testsuite}
# eventlet parses /etc/protocols which is not available in normal build envs
BuildRequires: netcfg
@@ -70,29 +60,19 @@ interpreter, or as part of a larger application.
%prep
%autosetup -p1 -n eventlet-%{version}
-# Fix non-executable script
-sed -i '1{/^#!/ d}' eventlet/support/greendns.py
-
%build
-%python_build
+%pyproject_wheel
%install
-%python_install
+%pyproject_install
%python_expand %fdupes %{buildroot}%{$python_sitelib}
%check
-# python2 is required to build for Leap, but tests fail (even upstream)
python2_pytest_param='--collect-only'
# dnspython 1 and 2: backdoor tests fail with "take too long"
skiptests="(BackdoorTest and test_server)"
-# fail only with dnspython 2:
skiptests+=" or test_dns_methods_are_green or test_noraise_dns_tcp or test_clear"
# These are flaky inside the OBS environment
skiptests+=" or test_fork_after_monkey_patch or test_send_1k_req_rep or test_cpu_usage_after_bind"
-# tracebacks in denosed suite with pytest inside obs presumably work different than when upstream is running nose?
-skiptests+=" or test_leakage_from_tracebacks"
-# temporarily disable to build with OpenSSL 3.0 bsc#1205042
-skiptests+=" or test_017_ssl_zeroreturnerror"
# it is racy, see: https://lore.kernel.org/all/CADVnQy=AnJY9NZ3w_xNghEG80-DhsXL0r_vEtkr=dmz0ugcoVw@mail.gmail.com/ (bsc#1202188)
skiptests+=" or test_018b_http_10_keepalive_framing"
# gh#eventlet/eventlet#803
skiptests+=" or test_raise_dns_tcp"
# gh#eventlet/eventlet#821 bsc#1216858
skiptests+=" or test_full_duplex"
-# Unknown Python 3.6 specific errors
-# TypeError: _wrap_socket() argument 1 must be _socket.socket, not SSLSocket
-# https://github.com/rthalley/dnspython/issues/559#issuecomment-675274960
-python36_skiptests+=" or test_connect_ssl or test_ssl_sending_messages or test_wrap_ssl"
-python36_skiptests+=" or ssl_test or wsgi_test"
-python3_skiptests+="$python36_skiptests"
-# https://github.com/eventlet/eventlet/issues/730
-python310_skiptests+=" or test_patcher_existing_locks_locked"
# https://github.com/eventlet/eventlet/issues/739
python310_skiptests+=" or test_017_ssl_zeroreturnerror"
-# no subdir recursion https://github.com/eventlet/eventlet/issues/638#issuecomment-676085599
-%pytest -o norecursedirs="tests/*" -k "not ($skiptests ${$python_skiptests})" ${$python_pytest_param}
+%pytest -k "not ($skiptests ${$python_skiptests})" ${$python_pytest_param}
%files %{python_files}
%license LICENSE
%doc AUTHORS NEWS README.rst
%{python_sitelib}/eventlet
-%{python_sitelib}/eventlet-%{version}*-info
+%{python_sitelib}/eventlet-%{version}.dist-info
%changelog
diff --git a/support-python3.12.patch b/support-python3.12.patch
deleted file mode 100644
index 14e4d08..0000000
--- a/support-python3.12.patch
+++ /dev/null
@@ -1,155 +0,0 @@
-Index: eventlet-0.33.3/eventlet/green/http/client.py
-===================================================================
---- eventlet-0.33.3.orig/eventlet/green/http/client.py
-+++ eventlet-0.33.3/eventlet/green/http/client.py
-@@ -52,7 +52,7 @@
- # 8. By copying, installing or otherwise using Python, Licensee
- # agrees to be bound by the terms and conditions of this License
- # Agreement.
--"""HTTP/1.1 client library
-+r"""HTTP/1.1 client library
-
-
-
-@@ -1447,6 +1447,18 @@ try:
- except ImportError:
- pass
- else:
-+ def _create_https_context(http_version):
-+ # Function also used by urllib.request to be able to set the check_hostname
-+ # attribute on a context object.
-+ context = ssl._create_default_https_context()
-+ # send ALPN extension to indicate HTTP/1.1 protocol
-+ if http_version == 11:
-+ context.set_alpn_protocols(['http/1.1'])
-+ # enable PHA for TLS 1.3 connections if available
-+ if context.post_handshake_auth is not None:
-+ context.post_handshake_auth = True
-+ return context
-+
- class HTTPSConnection(HTTPConnection):
- "This class allows communication via SSL."
-
-@@ -1463,13 +1475,9 @@ else:
- self.key_file = key_file
- self.cert_file = cert_file
- if context is None:
-- context = ssl._create_default_https_context()
-- will_verify = context.verify_mode != ssl.CERT_NONE
-- if check_hostname is None:
-- check_hostname = context.check_hostname
-- if check_hostname and not will_verify:
-- raise ValueError("check_hostname needs a SSL context with "
-- "either CERT_OPTIONAL or CERT_REQUIRED")
-+ context = _create_https_context(self._http_vsn)
-+ if check_hostname is not None:
-+ context.check_hostname = check_hostname
- if key_file or cert_file:
- context.load_cert_chain(cert_file, key_file)
- self._context = context
-Index: eventlet-0.33.3/eventlet/green/ssl.py
-===================================================================
---- eventlet-0.33.3.orig/eventlet/green/ssl.py
-+++ eventlet-0.33.3/eventlet/green/ssl.py
-@@ -22,21 +22,17 @@ __patched__ = [
- 'create_default_context', '_create_default_https_context']
-
- _original_sslsocket = __ssl.SSLSocket
--_original_wrap_socket = __ssl.wrap_socket
- _original_sslcontext = getattr(__ssl, 'SSLContext', None)
- _is_under_py_3_7 = sys.version_info < (3, 7)
-
-
- @contextmanager
- def _original_ssl_context(*args, **kwargs):
-- tmp_sslcontext = _original_wrap_socket.__globals__.get('SSLContext', None)
- tmp_sslsocket = _original_sslsocket._create.__globals__.get('SSLSocket', None)
- _original_sslsocket._create.__globals__['SSLSocket'] = _original_sslsocket
-- _original_wrap_socket.__globals__['SSLContext'] = _original_sslcontext
- try:
- yield
- finally:
-- _original_wrap_socket.__globals__['SSLContext'] = tmp_sslcontext
- _original_sslsocket._create.__globals__['SSLSocket'] = tmp_sslsocket
-
-
-@@ -76,16 +72,21 @@ class GreenSSLSocket(_original_sslsocket
- session=kw.get('session'),
- )
- else:
-- ret = _original_wrap_socket(
-+ context = _original_sslcontext(protocol=ssl_version)
-+ context.options |= cert_reqs
-+ if certfile or keyfile:
-+ context.load_cert_chain(
-+ certfile=certfile,
-+ keyfile=keyfile,
-+ )
-+ if ca_certs:
-+ context.load_verify_locations(ca_certs)
-+ if ciphers := kw.get('ciphers'):
-+ context.set_ciphers(ciphers)
-+ ret = context.wrap_socket(
- sock=sock.fd,
-- keyfile=keyfile,
-- certfile=certfile,
- server_side=server_side,
-- cert_reqs=cert_reqs,
-- ssl_version=ssl_version,
-- ca_certs=ca_certs,
- do_handshake_on_connect=False,
-- ciphers=kw.get('ciphers'),
- )
- ret.keyfile = keyfile
- ret.certfile = certfile
-Index: eventlet-0.33.3/eventlet/green/thread.py
-===================================================================
---- eventlet-0.33.3.orig/eventlet/green/thread.py
-+++ eventlet-0.33.3/eventlet/green/thread.py
-@@ -113,3 +113,6 @@ if hasattr(__thread, 'stack_size'):
- # this thread will suffer
-
- from eventlet.corolocal import local as _local
-+
-+if hasattr(__thread, 'daemon_threads_allowed'):
-+ daemon_threads_allowed = __thread.daemon_threads_allowed
-Index: eventlet-0.33.3/tests/tpool_test.py
-===================================================================
---- eventlet-0.33.3.orig/tests/tpool_test.py
-+++ eventlet-0.33.3/tests/tpool_test.py
-@@ -315,11 +315,11 @@ class TpoolLongTests(tests.LimitedTestCa
-
- @tests.skip_with_pyevent
- def test_a_buncha_stuff(self):
-- assert_ = self.assert_
-+ assertTrue = self.assertTrue
-
- class Dummy(object):
- def foo(self, when, token=None):
-- assert_(token is not None)
-+ assertTrue(token is not None)
- time.sleep(random.random() / 200.0)
- return token
-
-@@ -359,7 +359,7 @@ class TpoolLongTests(tests.LimitedTestCa
- first_created = middle_objs - initial_objs
- gc.collect()
- second_created = len(gc.get_objects()) - middle_objs
-- self.assert_(second_created - first_created < 10,
-+ self.assertTrue(second_created - first_created < 10,
- "first loop: %s, second loop: %s" % (first_created,
- second_created))
- tpool.killall()
-Index: eventlet-0.33.3/eventlet/debug.py
-===================================================================
---- eventlet-0.33.3.orig/eventlet/debug.py
-+++ eventlet-0.33.3/eventlet/debug.py
-@@ -13,7 +13,7 @@ __all__ = ['spew', 'unspew', 'format_hub
- 'hub_prevent_multiple_readers', 'hub_timer_stacks',
- 'hub_blocking_detection']
-
---_token_splitter = re.compile('\W+')
-+_token_splitter = re.compile(r'\W+')
-
-
- class Spew(object):