Sync from SUSE:ALP:Source:Standard:1.0 python-eventlet revision f6a10ee58a7686e27176d271b5ba3b1c
This commit is contained in:
parent 59f9fde162
commit e97789bd53
@@ -1,435 +0,0 @@
Index: eventlet-0.33.0/setup.py
===================================================================
--- eventlet-0.33.0.orig/setup.py
+++ eventlet-0.33.0/setup.py
@@ -27,7 +27,7 @@ setuptools.setup(
            'README.rst'
        )
    ).read(),
-    test_suite='nose.collector',
+    test_suite='tests',
    classifiers=[
        "Development Status :: 4 - Beta",
        "Intended Audience :: Developers",
Index: eventlet-0.33.0/eventlet.egg-info/SOURCES.txt
===================================================================
--- eventlet-0.33.0.orig/eventlet.egg-info/SOURCES.txt
+++ eventlet-0.33.0/eventlet.egg-info/SOURCES.txt
@@ -175,7 +175,6 @@ tests/greenthread_test.py
tests/hub_test.py
tests/mock.py
tests/mysqldb_test.py
-tests/nosewrapper.py
tests/openssl_test.py
tests/os_test.py
tests/parse_results.py
@@ -275,4 +274,4 @@ tests/stdlib/test_threading_local.py
tests/stdlib/test_timeout.py
tests/stdlib/test_urllib.py
tests/stdlib/test_urllib2.py
-tests/stdlib/test_urllib2_localnet.py
\ No newline at end of file
+tests/stdlib/test_urllib2_localnet.py
Index: eventlet-0.33.0/tests/greenio_test.py
===================================================================
--- eventlet-0.33.0.orig/tests/greenio_test.py
+++ eventlet-0.33.0/tests/greenio_test.py
@@ -9,8 +9,6 @@ import socket as _orig_sock
import sys
import tempfile

-from nose.tools import eq_
-
import eventlet
from eventlet import event, greenio, debug
from eventlet.hubs import get_hub
@@ -39,7 +37,7 @@ def expect_socket_timeout(function, *arg
        raise AssertionError("socket.timeout not raised")
    except socket.timeout as e:
        assert hasattr(e, 'args')
-        eq_(e.args[0], 'timed out')
+        assert e.args[0] == 'timed out'


def min_buf_size():
@@ -674,8 +672,8 @@ class TestGreenSocket(tests.LimitedTestC
        sender.sendto(b'second', 0, address)

        sender_address = ('127.0.0.1', sender.getsockname()[1])
-        eq_(receiver.recvfrom(1024), (b'first', sender_address))
-        eq_(receiver.recvfrom(1024), (b'second', sender_address))
+        assert receiver.recvfrom(1024) == (b'first', sender_address)
+        assert receiver.recvfrom(1024) == (b'second', sender_address)


def test_get_fileno_of_a_socket_works():
Index: eventlet-0.33.0/tests/nosewrapper.py
===================================================================
--- eventlet-0.33.0.orig/tests/nosewrapper.py
+++ eventlet-0.33.0/tests/nosewrapper.py
@@ -1,20 +1,13 @@
""" This script simply gets the paths correct for testing eventlet with the
hub extension for Nose."""
-import nose
from os.path import dirname, realpath, abspath
import sys
+import unittest


parent_dir = dirname(dirname(realpath(abspath(__file__))))
if parent_dir not in sys.path:
    sys.path.insert(0, parent_dir)

-# hudson does a better job printing the test results if the exit value is 0
-zero_status = '--force-zero-status'
-if zero_status in sys.argv:
-    sys.argv.remove(zero_status)
-    launch = nose.run
-else:
-    launch = nose.main
-
-launch(argv=sys.argv)
+if __name__ == '__main__':
+    unittest.main()
Index: eventlet-0.33.0/tests/__init__.py
===================================================================
--- eventlet-0.33.0.orig/tests/__init__.py
+++ eventlet-0.33.0/tests/__init__.py
@@ -20,7 +20,7 @@ import sys
import unittest
import warnings

-from nose.plugins.skip import SkipTest
+from unittest import SkipTest

import eventlet
from eventlet import tpool
@@ -223,7 +223,6 @@ class LimitedTestCase(unittest.TestCase)
def check_idle_cpu_usage(duration, allowed_part):
    if resource is None:
        # TODO: use https://code.google.com/p/psutil/
-        from nose.plugins.skip import SkipTest
        raise SkipTest('CPU usage testing not supported (`import resource` failed)')

    r1 = resource.getrusage(resource.RUSAGE_SELF)
Index: eventlet-0.33.0/tests/dagpool_test.py
===================================================================
--- eventlet-0.33.0.orig/tests/dagpool_test.py
+++ eventlet-0.33.0/tests/dagpool_test.py
@@ -5,7 +5,6 @@
@brief Test DAGPool class
"""

-from nose.tools import *
import eventlet
from eventlet.dagpool import DAGPool, Collision, PropagateError
import six
@@ -13,8 +12,8 @@ from contextlib import contextmanager
import itertools


-# Not all versions of nose.tools.assert_raises() support the usage in this
-# module, but it's straightforward enough to code that explicitly.
+# Not all versions of assert_raises() support the usage in this module,
+# but it's straightforward enough to code that explicitly.
@contextmanager
def assert_raises(exc):
    """exc is an exception class"""
@@ -163,7 +162,7 @@ class Capture(object):
            # a set. Make a set containing its elements.
            setlist.append(set(subseq))
        # Now that we've massaged 'sequence' into 'setlist', compare.
-        assert_equal(self.sequence, setlist)
+        assert self.sequence == setlist


# ****************************************************************************
@@ -191,14 +190,14 @@ def test_init():
    with check_no_suspend():
        results = pool.waitall()
    # with no spawn() or post(), waitall() returns preload data
-    assert_equals(results, dict(a=1, b=2, c=3))
+    assert results == dict(a=1, b=2, c=3)

    # preload sequence of pairs
    pool = DAGPool([("d", 4), ("e", 5), ("f", 6)])
    # this must not hang
    with check_no_suspend():
        results = pool.waitall()
-    assert_equals(results, dict(d=4, e=5, f=6))
+    assert results == dict(d=4, e=5, f=6)


def test_wait_each_empty():
@@ -216,10 +215,10 @@ def test_wait_each_preload():
    with check_no_suspend():
        # wait_each() may deliver in arbitrary order; collect into a dict
        # for comparison
-        assert_equals(dict(pool.wait_each("abc")), dict(a=1, b=2, c=3))
+        assert dict(pool.wait_each("abc")) == dict(a=1, b=2, c=3)

        # while we're at it, test wait() for preloaded keys
-        assert_equals(pool.wait("bc"), dict(b=2, c=3))
+        assert pool.wait("bc") == dict(b=2, c=3)


def post_each(pool, capture):
@@ -257,7 +256,7 @@ def test_wait_posted():
    eventlet.spawn(post_each, pool, capture)
    gotten = pool.wait("bcdefg")
    capture.add("got all")
-    assert_equals(gotten,
+    assert (gotten ==
            dict(b=2, c=3,
                 d="dval", e="eval",
                 f="fval", g="gval"))
@@ -285,7 +284,7 @@ def test_spawn_collision_spawn():
    pool = DAGPool()
    pool.spawn("a", (), lambda key, results: "aval")
    # hasn't yet even started
-    assert_equals(pool.get("a"), None)
+    assert pool.get("a") == None
    with assert_raises(Collision):
        # Attempting to spawn again with same key should collide even if the
        # first spawned greenthread hasn't yet had a chance to run.
@@ -293,7 +292,7 @@ def test_spawn_collision_spawn():
    # now let the spawned eventlet run
    eventlet.sleep(0)
    # should have finished
-    assert_equals(pool.get("a"), "aval")
+    assert pool.get("a") == "aval"
    with assert_raises(Collision):
        # Attempting to spawn with same key collides even when the greenthread
        # has completed.
@@ -324,60 +323,60 @@ def test_spawn_multiple():
    capture.step()
    # but none of them has yet produced a result
    for k in "defgh":
-        assert_equals(pool.get(k), None)
-    assert_equals(set(pool.keys()), set("abc"))
-    assert_equals(dict(pool.items()), dict(a=1, b=2, c=3))
-    assert_equals(pool.running(), 5)
-    assert_equals(set(pool.running_keys()), set("defgh"))
-    assert_equals(pool.waiting(), 1)
-    assert_equals(pool.waiting_for(), dict(h=set("defg")))
-    assert_equals(pool.waiting_for("d"), set())
-    assert_equals(pool.waiting_for("c"), set())
+        assert pool.get(k) == None
+    assert set(pool.keys()) == set("abc")
+    assert dict(pool.items()) == dict(a=1, b=2, c=3)
+    assert pool.running() == 5
+    assert set(pool.running_keys()) == set("defgh")
+    assert pool.waiting() == 1
+    assert pool.waiting_for() == dict(h=set("defg"))
+    assert pool.waiting_for("d") == set()
+    assert pool.waiting_for("c") == set()
    with assert_raises(KeyError):
        pool.waiting_for("j")
-    assert_equals(pool.waiting_for("h"), set("defg"))
+    assert pool.waiting_for("h") == set("defg")

    # let one of the upstream greenthreads complete
    events["f"].send("fval")
    spin()
    capture.step()
-    assert_equals(pool.get("f"), "fval")
-    assert_equals(set(pool.keys()), set("abcf"))
-    assert_equals(dict(pool.items()), dict(a=1, b=2, c=3, f="fval"))
-    assert_equals(pool.running(), 4)
-    assert_equals(set(pool.running_keys()), set("degh"))
-    assert_equals(pool.waiting(), 1)
-    assert_equals(pool.waiting_for("h"), set("deg"))
+    assert pool.get("f") == "fval"
+    assert set(pool.keys()) == set("abcf")
+    assert dict(pool.items()) == dict(a=1, b=2, c=3, f="fval")
+    assert pool.running() == 4
+    assert set(pool.running_keys()) == set("degh")
+    assert pool.waiting() == 1
+    assert pool.waiting_for("h") == set("deg")

    # now two others
    events["e"].send("eval")
    events["g"].send("gval")
    spin()
    capture.step()
-    assert_equals(pool.get("e"), "eval")
-    assert_equals(pool.get("g"), "gval")
-    assert_equals(set(pool.keys()), set("abcefg"))
-    assert_equals(dict(pool.items()),
+    assert pool.get("e") == "eval"
+    assert pool.get("g") == "gval"
+    assert set(pool.keys()) == set("abcefg")
+    assert (dict(pool.items()) ==
                  dict(a=1, b=2, c=3, e="eval", f="fval", g="gval"))
-    assert_equals(pool.running(), 2)
-    assert_equals(set(pool.running_keys()), set("dh"))
-    assert_equals(pool.waiting(), 1)
-    assert_equals(pool.waiting_for("h"), set("d"))
+    assert pool.running() == 2
+    assert set(pool.running_keys()) == set("dh")
+    assert pool.waiting() == 1
+    assert pool.waiting_for("h") == set("d")

    # last one
    events["d"].send("dval")
    # make sure both pool greenthreads get a chance to run
    spin()
    capture.step()
-    assert_equals(pool.get("d"), "dval")
-    assert_equals(set(pool.keys()), set("abcdefgh"))
-    assert_equals(dict(pool.items()),
+    assert pool.get("d") == "dval"
+    assert set(pool.keys()) == set("abcdefgh")
+    assert (dict(pool.items()) ==
                  dict(a=1, b=2, c=3,
                       d="dval", e="eval", f="fval", g="gval", h="hval"))
-    assert_equals(pool.running(), 0)
-    assert_false(pool.running_keys())
-    assert_equals(pool.waiting(), 0)
-    assert_equals(pool.waiting_for("h"), set())
+    assert pool.running() == 0
+    assert not pool.running_keys()
+    assert pool.waiting() == 0
+    assert pool.waiting_for("h") == set()

    capture.validate([
        ["h got b", "h got c"],
@@ -432,13 +431,13 @@ def test_spawn_many():
    spin()
    # verify that e completed (also that post(key) within greenthread
    # overrides implicit post of return value, which would be None)
-    assert_equals(pool.get("e"), "e")
+    assert pool.get("e") == "e"

    # With the dependency graph shown above, it is not guaranteed whether b or
    # c will complete first. Handle either case.
    sequence = capture.sequence[:]
    sequence[1:3] = [set([sequence[1].pop(), sequence[2].pop()])]
-    assert_equals(sequence,
+    assert (sequence ==
            [set(["a done"]),
             set(["b done", "c done"]),
             set(["d done"]),
@@ -466,7 +465,7 @@ def test_wait_each_all():
    for pos in range(len(keys)):
        # next value from wait_each()
        k, v = next(each)
-        assert_equals(k, keys[pos])
+        assert k == keys[pos]
        # advance every pool greenlet as far as it can go
        spin()
        # everything from keys[:pos+1] should have a value by now
@@ -494,7 +493,7 @@ def test_kill():
    pool.kill("a")
    # didn't run
    spin()
-    assert_equals(pool.get("a"), None)
+    assert pool.get("a") == None
    # killing it forgets about it
    with assert_raises(KeyError):
        pool.kill("a")
@@ -505,7 +504,7 @@ def test_kill():
    with assert_raises(KeyError):
        pool.kill("a")
    # verify it ran to completion
-    assert_equals(pool.get("a"), 2)
+    assert pool.get("a") == 2


def test_post_collision_preload():
@@ -533,7 +532,7 @@ def test_post_collision_spawn():
    pool.kill("a")
    # now we can post
    pool.post("a", 3)
-    assert_equals(pool.get("a"), 3)
+    assert pool.get("a") == 3

    pool = DAGPool()
    pool.spawn("a", (), lambda key, result: 4)
@@ -553,10 +552,10 @@ def test_post_replace():
    pool = DAGPool()
    pool.post("a", 1)
    pool.post("a", 2, replace=True)
-    assert_equals(pool.get("a"), 2)
-    assert_equals(dict(pool.wait_each("a")), dict(a=2))
-    assert_equals(pool.wait("a"), dict(a=2))
-    assert_equals(pool["a"], 2)
+    assert pool.get("a") == 2
+    assert dict(pool.wait_each("a")) == dict(a=2)
+    assert pool.wait("a") == dict(a=2)
+    assert pool["a"] == 2


def waitfor(capture, pool, key):
@@ -598,14 +597,14 @@ def test_waitall_exc():
    try:
        pool.waitall()
    except PropagateError as err:
-        assert_equals(err.key, "a")
+        assert err.key == "a"
        assert isinstance(err.exc, BogusError), \
            "exc attribute is {0}, not BogusError".format(err.exc)
-        assert_equals(str(err.exc), "bogus")
+        assert str(err.exc) == "bogus"
        msg = str(err)
-        assert_in("PropagateError(a)", msg)
-        assert_in("BogusError", msg)
-        assert_in("bogus", msg)
+        assert "PropagateError(a)" in msg
+        assert "BogusError" in msg
+        assert "bogus" in msg


def test_propagate_exc():
@@ -616,20 +615,20 @@ def test_propagate_exc():
    try:
        pool["c"]
    except PropagateError as errc:
-        assert_equals(errc.key, "c")
+        assert errc.key == "c"
        errb = errc.exc
-        assert_equals(errb.key, "b")
+        assert errb.key == "b"
        erra = errb.exc
-        assert_equals(erra.key, "a")
+        assert erra.key == "a"
        assert isinstance(erra.exc, BogusError), \
            "exc attribute is {0}, not BogusError".format(erra.exc)
-        assert_equals(str(erra.exc), "bogus")
+        assert str(erra.exc) == "bogus"
        msg = str(errc)
-        assert_in("PropagateError(a)", msg)
-        assert_in("PropagateError(b)", msg)
-        assert_in("PropagateError(c)", msg)
-        assert_in("BogusError", msg)
-        assert_in("bogus", msg)
+        assert "PropagateError(a)" in msg
+        assert "PropagateError(b)" in msg
+        assert "PropagateError(c)" in msg
+        assert "BogusError" in msg
+        assert "bogus" in msg


def test_wait_each_exc():
@@ -681,13 +680,13 @@ def test_post_get_exc():
        pass

    # wait_each_success() filters
-    assert_equals(dict(pool.wait_each_success()), dict(a=bogua))
-    assert_equals(dict(pool.wait_each_success("ab")), dict(a=bogua))
-    assert_equals(dict(pool.wait_each_success("a")), dict(a=bogua))
-    assert_equals(dict(pool.wait_each_success("b")), {})
+    assert dict(pool.wait_each_success()) == dict(a=bogua)
+    assert dict(pool.wait_each_success("ab")) == dict(a=bogua)
+    assert dict(pool.wait_each_success("a")) == dict(a=bogua)
+    assert dict(pool.wait_each_success("b")) == {}

    # wait_each_exception() filters the other way
-    assert_equals(dict(pool.wait_each_exception()), dict(b=bogub))
-    assert_equals(dict(pool.wait_each_exception("ab")), dict(b=bogub))
-    assert_equals(dict(pool.wait_each_exception("a")), {})
-    assert_equals(dict(pool.wait_each_exception("b")), dict(b=bogub))
+    assert dict(pool.wait_each_exception()) == dict(b=bogub)
+    assert dict(pool.wait_each_exception("ab")) == dict(b=bogub)
+    assert dict(pool.wait_each_exception("a")) == {}
+    assert dict(pool.wait_each_exception("b")) == dict(b=bogub)
BIN eventlet-0.33.3.tar.gz (Stored with Git LFS): Binary file not shown.
BIN eventlet-0.34.3.tar.gz (Stored with Git LFS, Normal file): Binary file not shown.
@@ -1,119 +0,0 @@
diff --git a/eventlet/patcher.py b/eventlet/patcher.py
index b249d6f19..4eeb93439 100644
--- a/eventlet/patcher.py
+++ b/eventlet/patcher.py
@@ -412,6 +412,23 @@ def _green_existing_locks():
        elif py3_style and not isinstance(obj, pyrlock_type):
            _fix_py3_rlock(obj)

+    if (3, 0) <= sys.version_info <= (3, 10):
+        # Older py3 won't have RLocks show up in gc.get_objects() -- see
+        # https://github.com/eventlet/eventlet/issues/546 -- so green a handful
+        # that we know are significant
+        import logging
+        if isinstance(logging._lock, rlock_type):
+            _fix_py3_rlock(logging._lock)
+        logging._acquireLock()
+        try:
+            for ref in logging._handlerList:
+                handler = ref()
+                if handler and isinstance(handler.lock, rlock_type):
+                    _fix_py3_rlock(handler.lock)
+                del handler
+        finally:
+            logging._releaseLock()
+

def _fix_py2_rlock(rlock, tid):
    import eventlet.green.threading
@@ -425,7 +442,7 @@ def _fix_py2_rlock(rlock, tid):

def _fix_py3_rlock(old):
    import gc
-    import threading
+    from eventlet.green import threading
    new = threading._PyRLock()
    while old._is_owned():
        old.release()
@@ -434,14 +451,23 @@ def _fix_py3_rlock(old):
        new.acquire()
    gc.collect()
    for ref in gc.get_referrers(old):
-        try:
-            ref_vars = vars(ref)
-        except TypeError:
-            pass
+        if isinstance(ref, dict):
+            for k, v in ref.items():
+                if v is old:
+                    ref[k] = new
+        elif isinstance(ref, list):
+            for k, v in enumerate(ref):
+                if v is old:
+                    ref[k] = new
        else:
-            for k, v in ref_vars.items():
-                if v == old:
-                    setattr(ref, k, new)
+            try:
+                ref_vars = vars(ref)
+            except TypeError:
+                pass
+            else:
+                for k, v in ref_vars.items():
+                    if v is old:
+                        setattr(ref, k, new)


def _green_os_modules():
diff --git a/tests/isolated/patcher_existing_logging_module_lock.py b/tests/isolated/patcher_existing_logging_module_lock.py
new file mode 100644
index 000000000..2acad62ee
--- /dev/null
+++ b/tests/isolated/patcher_existing_logging_module_lock.py
@@ -0,0 +1,30 @@
+# https://github.com/eventlet/eventlet/issues/730
+# https://github.com/eventlet/eventlet/pull/754
+__test__ = False
+
+
+if __name__ == "__main__":
+    import logging
+    import eventlet.patcher
+    eventlet.patcher.monkey_patch(thread=True)
+    import threading
+
+    def take_and_release():
+        try:
+            logging._lock.acquire()
+        finally:
+            logging._lock.release()
+
+    assert logging._lock.acquire()
+    t = threading.Thread(target=take_and_release)
+    t.daemon = True
+    t.start()
+
+    t.join(timeout=0.1)
+    # we should timeout, and the thread is still blocked waiting on the lock
+    assert t.is_alive()
+
+    logging._lock.release()
+    t.join(timeout=0.1)
+    assert not t.is_alive()
+    print("pass")
diff --git a/tests/patcher_test.py b/tests/patcher_test.py
index dbf6e1c71..e8d6f3300 100644
--- a/tests/patcher_test.py
+++ b/tests/patcher_test.py
@@ -485,6 +485,10 @@ def test_patcher_existing_locks_unlocked():
    tests.run_isolated('patcher_existing_locks_unlocked.py')


+def test_patcher_existing_logging_module_lock():
+    tests.run_isolated('patcher_existing_logging_module_lock.py')
+
+
def test_importlib_lock():
    tests.run_isolated('patcher_importlib_lock.py')
@@ -1,20 +0,0 @@
--- eventlet-0.33.0.orig/tests/greendns_test.py
+++ eventlet-0.33.0/tests/greendns_test.py
@@ -888,7 +888,7 @@ class TinyDNSTests(tests.LimitedTestCase
        # https://github.com/eventlet/eventlet/issues/499
        # None means we don't want the server to find the IP
        with tests.dns_tcp_server(None) as dnsaddr:
-            resolver = Resolver()
+            resolver = Resolver(configure=False)
            resolver.nameservers = [dnsaddr[0]]
            resolver.nameserver_ports[dnsaddr[0]] = dnsaddr[1]

@@ -899,7 +899,7 @@ class TinyDNSTests(tests.LimitedTestCase
        # https://github.com/eventlet/eventlet/issues/499
        expected_ip = "192.168.1.1"
        with tests.dns_tcp_server(expected_ip) as dnsaddr:
-            resolver = Resolver()
+            resolver = Resolver(configure=False)
            resolver.nameservers = [dnsaddr[0]]
            resolver.nameserver_ports[dnsaddr[0]] = dnsaddr[1]
            response = resolver.query('host.example.com', 'a', tcp=True)
@@ -1,50 +0,0 @@
https://github.com/eventlet/eventlet/pull/643

From df6b965c1b03a688c643dc7f5845cb88287027d1 Mon Sep 17 00:00:00 2001
From: "Bernhard M. Wiedemann" <bwiedemann@suse.de>
Date: Fri, 28 Aug 2020 20:24:42 +0200
Subject: [PATCH] Extend test cert to 2049

This change makes tests pass after 2028
Background:
As part of my work on reproducible builds for openSUSE, I check that software still gives identical build results in the future.
The usual offset is +15 years, because that is how long I expect some software will be used in some places.
This showed up failing tests in our package build.

See https://reproducible-builds.org/ for why this matters.
---
 tests/test_server.crt | 16 ++++++++--------
 1 file changed, 8 insertions(+), 8 deletions(-)

diff --git a/tests/test_server.crt b/tests/test_server.crt
index 67c9adc8b..78759e810 100644
--- a/tests/test_server.crt
+++ b/tests/test_server.crt
@@ -2,7 +2,7 @@
MIIDwjCCAqqgAwIBAgIJAN19NW1oDKKtMA0GCSqGSIb3DQEBCwUAMHYxCzAJBgNV
BAYTAlRTMQ0wCwYDVQQIDARUZXN0MQ0wCwYDVQQHDARUZXN0MRYwFAYDVQQKDA1U
ZXN0IEV2ZW50bGV0MQ0wCwYDVQQLDARUZXN0MQ0wCwYDVQQDDARUZXN0MRMwEQYJ
-KoZIhvcNAQkBFgRUZXN0MB4XDTE4MDgyMjEzNDIxMVoXDTI4MDgxOTEzNDIxMVow
+KoZIhvcNAQkBFgRUZXN0MB4XDTIwMDgyODEzMTUxNloXDTQ5MTIzMTEzMTUxN1ow
djELMAkGA1UEBhMCVFMxDTALBgNVBAgMBFRlc3QxDTALBgNVBAcMBFRlc3QxFjAU
BgNVBAoMDVRlc3QgRXZlbnRsZXQxDTALBgNVBAsMBFRlc3QxDTALBgNVBAMMBFRl
c3QxEzARBgkqhkiG9w0BCQEWBFRlc3QwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAw
@@ -13,11 +13,11 @@ Lvc6cJHMKaxHCeIBOL+z/9kJqhh30eqsmNB5AXSoV8b2B3MV3glW2vd5WJVYEWxl
3+GNgzZJ3KGape7pcBYER7zg/yZLZxgNFlTCOZiysjNxC0liJA9tgUQhRc1gsqA8
dQxzvqW8kuZedmatjyM58WixvjymobC3AgMBAAGjUzBRMB0GA1UdDgQWBBQT3V3f
8vCoqGXe6zySSjVP+J/P7zAfBgNVHSMEGDAWgBQT3V3f8vCoqGXe6zySSjVP+J/P
-7zAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3DQEBCwUAA4IBAQAws7zXsftw5s/P
-dnyFAf8q0WoVtWi9ljshWfJvTFMfuCqdSoNT+kIeQq071/RFW9YLqDZGXI4fyfzW
-50A7xFm+Syy7wPOmfLJdPx5HRJ5jgIDlij9vL45W3mXEohkkzMCdjwXfOIQPOEEx
-ZQHF57RaHlKEGexc/yvOLlOgKP23BOgB7pZjCC9divyDJ3ETlzgE+UTymHxmFM0i
-TCAM9dGEl1QPr7zA08rNgVae+/uQksdM55QmQFkTAXisFPcxNgHSKOSHsDiUJvWG
-7bJrwO6+T2wjRxWRD7anQV3DqBG1WteXA/dfYqjUi0QPqreWqNb+3OM60UwPJsvl
-ZDfUrsbY
+7zAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3DQEBCwUAA4IBAQAowDu7bu/6DuLH
+yNv8Q27vhsHS2IbguGSTlpSObUqvIF1fv2UzjSl1jjmkN7IQqEjV7ql7NpmVGv5u
+4s5uKGID9q0Eq1wNKpqps16ABOb6I415j3NMq7r9bSNrlgPHrSYnySSyN2JyiXyR
+Q9wxY6YBQMHchFytYui9/A3WwmzfZkzZpN2AWlC/emiDlfbWT9bFO6tgImrD3BIi
+PJoTsc5SBmevUvOC6LPPIKq3/SdywgAi3AGKcyMlLhEjFX5lRA3GK3pDudRqKY2+
+3n6WcOut0RytatsNYqIMVakIGC4ZCLi69xSLlRVVvxnfGgJxw+mHLtlQxDz2GoQ+
+XAW8Yf8H
-----END CERTIFICATE-----
@@ -1,7 +1,56 @@
-------------------------------------------------------------------
Fri Jan 19 08:47:40 UTC 2024 - Dirk Müller <dmueller@suse.com>

- update to 0.34.3:
  * Fix security issue in the wsgi module related to RFC 9112
  * Fix segfault, a new approach for greening existing locks
  * greendns: fix getaddrinfo parameter name
  * Fix deprecation warning on ssl.PROTOCOL_TLS
  * Pytests, fix error at teardown of
    TestGreenSocket.test_full_duplex
  * Skip test which uses Py cgi module
  * Drop old code based on python < 3.7
  * Allowing inheritance of GreenSSLSocket without overriding the
    __new_ method https://github.com/eventlet/eventlet/pull/796
  * [bug] Fix broken API related to `__version__` removal
  * [doc] Fix pypi broken link
  * 0.34.1
  * [bug] Fix memory leak in greendns
  * [infra] Fix OIDC authentication failure
  * [bug] Ignore asyncore and asynchat for Python 3.12+
  * 0.34.0 (Not released on Pypi)
  * Dropped support for Python 3.6 and earlier.
  * Fix Python 3.13 compat by adding missing attibute
    '_is_main_interpreter'
  * Add support of Python 3.12
  * Drop unmaintained and unused stdlib tests
  * Fix tests and CI for Python 3.7 and higher
  * Stop claiming to create universal wheels
  * Fix green logging locks for Python versions <= 3.10
- switch to PEP517 build
- cleanup tests
- drop support-python3.12.patch, denose-eventlet.patch:
  solved differently upstream
- drop python-eventlet-FTBFS2028.patch fix-py3-rlock.patch:
  upstream

-------------------------------------------------------------------
Thu Nov 30 04:41:02 UTC 2023 - Steve Kowalik <steven.kowalik@suse.com>

- Add patch support-python3.12.patch:
  * Support Python 3.12 changes.

-------------------------------------------------------------------
Tue Nov 7 06:07:34 UTC 2023 - Jiri Slaby <jslaby@suse.cz>

- disable test_full_duplex (bsc#1216858)

-------------------------------------------------------------------
Fri Apr 21 12:24:43 UTC 2023 - Dirk Müller <dmueller@suse.com>

- add sle15_python_module_pythons (jsc#PED-68)
- Remove upstreamed newdnspython.patch (bsc#1208126).
- skip test_raise_dns_tcp test (gh#eventlet/eventlet#803)

-------------------------------------------------------------------
Thu Apr 13 22:41:06 UTC 2023 - Matej Cepl <mcepl@suse.com>
@@ -31,7 +80,7 @@ Sat Jan 28 12:30:48 UTC 2023 - Dirk Müller <dmueller@suse.com>
Tue Jan 17 16:55:46 UTC 2023 - Daniel Garcia <daniel.garcia@suse.com>

- Add fix-py3-rlock.patch to make the code compatible with python 3.11,
  gh#eventlet/eventlet#754
  gh#eventlet/eventlet#754

-------------------------------------------------------------------
Fri Dec 9 10:16:59 UTC 2022 - Thorsten Kukuk <kukuk@suse.com>
@@ -41,7 +90,7 @@ Fri Dec 9 10:16:59 UTC 2022 - Thorsten Kukuk <kukuk@suse.com>
-------------------------------------------------------------------
Sat Dec 3 05:14:32 UTC 2022 - Yogalakshmi Arunachalam <yarunachalam@suse.com>

- Update to v0.33.2
- Update to v0.33.2
  * Stop using deprecated threading APIs
    Way back in py26, snake_case alternatives were added for the old
    camelCase APIs. py310 started emitting DeprecationWarnings about them;
@@ -125,18 +174,18 @@ Thu Dec 10 22:43:44 UTC 2020 - Benjamin Greiner <code@bnavigator.de>
Mon Dec 7 00:14:23 UTC 2020 - Benjamin Greiner <code@bnavigator.de>

- Update to 0.29.1
  * patcher: [py27] recursion error in pytest/python2.7 installing
  * patcher: [py27] recursion error in pytest/python2.7 installing
    register_at_fork
  * patcher: monkey_patch(builtins=True) failed on py3 because
  * patcher: monkey_patch(builtins=True) failed on py3 because
    `file` class is gone
  * don't crash on PyPy 7.0.0
  * Only install monotonic on python2
  * don't crash on PyPy 7.0.0
  * Only install monotonic on python2
- Changes for 0.29.0
  * ssl: context wrapped listener fails accept()
- Changes for 0.28.1
  * Clean up TypeError in __del__
- Changes for 0.28.0
  * Always remove the right listener from the hub
  * Always remove the right listener from the hub
    gh#enventlet/eventlet#645
- Changes for 0.27.0
  * patcher: Clean up threading book-keeping at fork when
@@ -1,7 +1,7 @@
#
# spec file for package python-eventlet
#
# Copyright (c) 2023 SUSE LLC
# Copyright (c) 2024 SUSE LLC
#
# All modifications and additions to the file contributed by third parties
# remain the property of their copyright owners, unless otherwise agreed
@@ -16,38 +16,28 @@
#


%define skip_python2 1
%{?sle15_python_module_pythons}
Name:           python-eventlet
Version:        0.33.3
Version:        0.34.3
Release:        0
Summary:        Concurrent networking library for Python
License:        MIT
Group:          Development/Languages/Python
URL:            https://eventlet.net
Source:         https://files.pythonhosted.org/packages/source/e/eventlet/eventlet-%{version}.tar.gz
# PATCH-FEATURE-UPSTREAM remove_nose.patch gh#eventlet/eventlet#638 mcepl@suse.com
# Removes dependency on nose
Patch0:         denose-eventlet.patch
# PATCH-FIX-UPSTREAM newdnspython.patch mcepl@suse.com -- patch is from gh#rthalley/dnspython#519, discussion in gh#eventlet/eventlet#638
Patch1:         newdnspython.patch
# PATCH-FIX-UPSTREAM https://github.com/eventlet/eventlet/pull/643
Patch2:         python-eventlet-FTBFS2028.patch
# PATCH-FIX-UPSTREAM fix-py3-rlock.patch gh#eventlet/eventlet#754
Patch3:         fix-py3-rlock.patch
BuildRequires:  %{python_module setuptools}
BuildRequires:  %{python_module hatch-vcs}
BuildRequires:  %{python_module pip}
BuildRequires:  %{python_module wheel}
BuildRequires:  fdupes
BuildRequires:  python-rpm-macros
Requires:       netcfg
Requires:       python-dnspython >= 1.15.0
Requires:       python-greenlet >= 0.3
Requires:       python-six >= 1.10.0
Requires:       python-greenlet >= 1.0
BuildArch:      noarch
# SECTION TEST requirements
BuildRequires:  %{python_module dnspython >= 1.15.0}
BuildRequires:  %{python_module greenlet >= 0.3}
BuildRequires:  %{python_module greenlet >= 1.0}
BuildRequires:  %{python_module pytest}
BuildRequires:  %{python_module six >= 1.10.0}
BuildRequires:  %{python_module testsuite}
# eventlet parses /etc/protocols which is not available in normal build envs
BuildRequires:  netcfg
@@ -68,52 +58,36 @@ is implicit, which means Eventlet can be used from the Python
interpreter, or as part of a larger application.

%prep
%setup -q -n eventlet-%{version}
%autopatch -p1

# Fix non-executable script
sed -i '1{/^#!/ d}' eventlet/support/greendns.py
%autosetup -p1 -n eventlet-%{version}

%build
%python_build
%pyproject_wheel

%install
%python_install
%pyproject_install
%python_expand %fdupes %{buildroot}%{$python_sitelib}

%check
# python2 is required to build for Leap, but tests fail (even upstream)
python2_pytest_param='--collect-only'
# dnspython 1 and 2: backdoor tests fail with "take too long"
skiptests="(BackdoorTest and test_server)"
# fail only with dnspython 2:
skiptests+=" or test_dns_methods_are_green or test_noraise_dns_tcp or test_clear"
# These are flaky inside the OBS environment
skiptests+=" or test_fork_after_monkey_patch or test_send_1k_req_rep or test_cpu_usage_after_bind"
# tracebacks in denosed suite with pytest inside obs presumably work different than when upstream is running nose?
skiptests+=" or test_leakage_from_tracebacks"
# temporarily disable to build with OpenSSL 3.0 bsc#1205042
skiptests+=" or test_017_ssl_zeroreturnerror"
# it is racy, see: https://lore.kernel.org/all/CADVnQy=AnJY9NZ3w_xNghEG80-DhsXL0r_vEtkr=dmz0ugcoVw@mail.gmail.com/ (bsc#1202188)
skiptests+=" or test_018b_http_10_keepalive_framing"
# gh#eventlet/eventlet#803
skiptests+=" or test_raise_dns_tcp"
# gh#eventlet/eventlet#821 bsc#1216858
skiptests+=" or test_full_duplex"

# Unknown Python 3.6 specific errors
# TypeError: _wrap_socket() argument 1 must be _socket.socket, not SSLSocket
# https://github.com/rthalley/dnspython/issues/559#issuecomment-675274960
python36_skiptests+=" or test_connect_ssl or test_ssl_sending_messages or test_wrap_ssl"
python36_skiptests+=" or ssl_test or wsgi_test"
python3_skiptests+="$python36_skiptests"
# https://github.com/eventlet/eventlet/issues/730
python310_skiptests+=" or test_patcher_existing_locks_locked"
# https://github.com/eventlet/eventlet/issues/739
python310_skiptests+=" or test_017_ssl_zeroreturnerror"
# no subdir recursion https://github.com/eventlet/eventlet/issues/638#issuecomment-676085599
%pytest -o norecursedirs="tests/*" -k "not ($skiptests ${$python_skiptests})" ${$python_pytest_param}
%pytest -k "not ($skiptests ${$python_skiptests})" ${$python_pytest_param}

%files %{python_files}
%license LICENSE
%doc AUTHORS NEWS README.rst
%{python_sitelib}/eventlet
%{python_sitelib}/eventlet-%{version}*-info
%{python_sitelib}/eventlet-%{version}.dist-info

%changelog