Mirror of https://gitlab.gnome.org/GNOME/glib.git (synced 2024-11-10 03:16:17 +01:00)
Merge branch 'th/optimize-weak-ref-list' into 'main'

[th/optimize-weak-ref-list] rework GObject's `WeakRefData` to track references in an array instead of GSList

See merge request GNOME/glib!3869

This commit is contained in: b65657f068
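For orientation before the diff: GWeakRef is the public API whose per-object bookkeeping this change reworks. The short program below is a hedged usage sketch, not part of the merge request; it only uses documented GObject calls, and the comments describe what the internal WeakRefData has to track.

/* Hedged usage sketch, not part of this merge request: each initialized or
 * set GWeakRef gets registered in the target object's per-object WeakRefData,
 * which is the structure reworked in the diff below.
 * Build (assumption): cc sketch.c $(pkg-config --cflags --libs gobject-2.0) */
#include <glib-object.h>

int
main (void)
{
  GObject *obj = g_object_new (G_TYPE_OBJECT, NULL);
  GWeakRef weak;
  GObject *strong;

  g_weak_ref_init (&weak, obj);    /* registers &weak with obj's WeakRefData */

  strong = g_weak_ref_get (&weak); /* returns a new strong reference, or NULL */
  g_assert (strong == obj);
  g_object_unref (strong);

  g_object_unref (obj);            /* destroys obj; all its weak refs are cleared */
  g_assert_null (g_weak_ref_get (&weak));

  g_weak_ref_clear (&weak);        /* unregisters &weak */
  return 0;
}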
@@ -262,20 +262,61 @@ object_get_optional_flags_p (GObject *object)
  * and has a separate lifetime from the object. */
 typedef struct
 {
-  gatomicrefcount ref_count;
-  gint lock_flags; /* (atomic) */
-  GSList *list; /* (element-type GWeakRef) (owned) */
+  /* This is both an atomic ref-count and bit 30 (WEAK_REF_DATA_LOCK_BIT) is
+   * used for g_bit_lock(). */
+  gint atomic_field;
+
+  guint16 len;
+
+  /* Only relevant when len > 1. In that case, it's the allocated size of
+   * "list.many" array. */
+  guint16 alloc;
+
+  /* Only relevant when len > 0. In that case, either "one" or "many" union
+   * field is in use. */
+  union
+  {
+    GWeakRef *one;
+    GWeakRef **many;
+  } list;
 } WeakRefData;
 
+/* We choose bit 30, and not bit 31. Bit 31 would be the sign for gint, so it
+ * is a bit awkward to use. Note that it probably also would work fine.
+ *
+ * But 30 is ok, because it still leaves us space for 2^30-1 references, which
+ * is more than we ever need. */
+#define WEAK_REF_DATA_LOCK_BIT 30
+
 static void weak_ref_data_clear_list (WeakRefData *wrdata, GObject *object);
 
 static WeakRefData *
 weak_ref_data_ref (WeakRefData *wrdata)
 {
+  gint ref;
+
 #if G_ENABLE_DEBUG
   g_assert (wrdata);
 #endif
-  g_atomic_ref_count_inc (&wrdata->ref_count);
+  ref = g_atomic_int_add (&wrdata->atomic_field, 1);
+
+#if G_ENABLE_DEBUG
+  /* Overflow is almost impossible to happen, because the user would need to
+   * spawn that many operating system threads, that all call
+   * g_weak_ref_{set,get}() in parallel.
+   *
+   * Still, assert in debug mode. */
+  g_assert (ref < G_MAXINT32);
+
+  /* the real ref-count would be the following: */
+  ref = (ref + 1) & ~(1 << WEAK_REF_DATA_LOCK_BIT);
+
+  /* assert that the ref-count is still in the valid range. */
+  g_assert (ref > 0 && ref < (1 << WEAK_REF_DATA_LOCK_BIT));
+#endif
+  (void) ref;
+
   return wrdata;
 }
 
@@ -285,7 +326,16 @@ weak_ref_data_unref (WeakRefData *wrdata)
   if (!wrdata)
     return;
 
-  if (!g_atomic_ref_count_dec (&wrdata->ref_count))
+  /* Note that we also use WEAK_REF_DATA_LOCK_BIT on "atomic_field" as a bit
+   * lock. However, we will always keep the @wrdata alive (having a reference)
+   * while holding a lock (otherwise, we couldn't unlock anymore). Thus, at the
+   * point when we decrement the ref-count to zero, we surely also have the
+   * @wrdata unlocked.
+   *
+   * This means, using "atomic_field" both as ref-count and the lock bit is
+   * fine. */
+
+  if (!g_atomic_int_dec_and_test (&wrdata->atomic_field))
     return;
 
 #if G_ENABLE_DEBUG
@@ -294,7 +344,7 @@ weak_ref_data_unref (WeakRefData *wrdata)
    * should have been cleared.
    *
    * Calling weak_ref_data_clear_list() should be unnecessary. */
-  g_assert (!wrdata->list);
+  g_assert (wrdata->len == 0);
 #endif
 
   g_free_sized (wrdata, sizeof (WeakRefData));
@@ -306,14 +356,14 @@ weak_ref_data_lock (WeakRefData *wrdata)
   /* Note that while holding a _weak_ref_lock() on the @weak_ref, we MUST not acquire a
    * weak_ref_data_lock() on the @wrdata. The other way around! */
   if (wrdata)
-    g_bit_lock (&wrdata->lock_flags, 0);
+    g_bit_lock (&wrdata->atomic_field, WEAK_REF_DATA_LOCK_BIT);
 }
 
 static void
 weak_ref_data_unlock (WeakRefData *wrdata)
 {
   if (wrdata)
-    g_bit_unlock (&wrdata->lock_flags, 0);
+    g_bit_unlock (&wrdata->atomic_field, WEAK_REF_DATA_LOCK_BIT);
 }
 
 static gpointer
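The hunks above fold the former separate ref_count and lock_flags fields into a single atomic_field: bits 0..29 hold the reference count and bit 30 is taken by g_bit_lock(). A hedged standalone sketch of that pattern follows; the names (Shared, shared_*) are illustrative and not GLib's code.

/* Hedged sketch of one gint doubling as an atomic ref-count and a
 * g_bit_lock() lock, as with atomic_field above. */
#include <glib.h>

#define SHARED_LOCK_BIT 30 /* outside the range the counter can reach */

typedef struct
{
  gint atomic_field; /* bits 0..29: ref-count, bit 30: lock */
} Shared;

static void
shared_ref (Shared *s)
{
  /* Never touches bit 30, because the count stays below 2^30. */
  g_atomic_int_add (&s->atomic_field, 1);
}

static gboolean
shared_unref (Shared *s)
{
  /* Callers only drop their reference while unlocked, so when the count
   * reaches zero the lock bit is guaranteed to be clear. */
  return g_atomic_int_dec_and_test (&s->atomic_field);
}

static void
shared_lock (Shared *s)
{
  g_bit_lock (&s->atomic_field, SHARED_LOCK_BIT);
}

static void
shared_unlock (Shared *s)
{
  g_bit_unlock (&s->atomic_field, SHARED_LOCK_BIT);
}

int
main (void)
{
  Shared s = { .atomic_field = 1 }; /* one initial reference */

  shared_ref (&s);
  shared_lock (&s);
  /* ... critical section ... */
  shared_unlock (&s);

  g_assert (!shared_unref (&s)); /* 2 -> 1 */
  g_assert (shared_unref (&s));  /* 1 -> 0: last reference gone */
  return 0;
}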
@@ -328,13 +378,15 @@ weak_ref_data_get_or_create_cb (GQuark key_id,
   if (!wrdata)
     {
       wrdata = g_new (WeakRefData, 1);
-      *wrdata = (WeakRefData){
-        /* The initial ref-count is 1. This one is owned by the GData until the
-         * object gets destroyed. */
-        .ref_count = 1,
-        .lock_flags = 0,
-        .list = NULL,
-      };
+
+      /* The initial ref-count is 1. This one is owned by the GData until the
+       * object gets destroyed.
+       *
+       * The WEAK_REF_DATA_LOCK_BIT bit is of course initially unset. */
+      wrdata->atomic_field = 1;
+      wrdata->len = 0;
+      /* Other fields are left uninitialized. They are only considered with a positive @len. */
 
       *data = wrdata;
       *destroy_notify = (GDestroyNotify) weak_ref_data_unref;
@@ -387,6 +439,110 @@ weak_ref_data_get_surely (GObject *object)
   return wrdata;
 }
 
+static gint32
+weak_ref_data_list_find (WeakRefData *wrdata, GWeakRef *weak_ref)
+{
+  if (wrdata->len == 1u)
+    {
+      if (wrdata->list.one == weak_ref)
+        return 0;
+    }
+  else
+    {
+      guint16 i;
+
+      for (i = 0; i < wrdata->len; i++)
+        {
+          if (wrdata->list.many[i] == weak_ref)
+            return i;
+        }
+    }
+
+  return -1;
+}
+
+static gboolean
+weak_ref_data_list_add (WeakRefData *wrdata, GWeakRef *weak_ref)
+{
+  if (wrdata->len == 0u)
+    wrdata->list.one = weak_ref;
+  else
+    {
+      if (wrdata->len == 1u)
+        {
+          GWeakRef *weak_ref2 = wrdata->list.one;
+
+          wrdata->alloc = 4u;
+          wrdata->list.many = g_new (GWeakRef *, wrdata->alloc);
+          wrdata->list.many[0] = weak_ref2;
+        }
+      else if (wrdata->len == wrdata->alloc)
+        {
+          guint16 alloc;
+
+          alloc = wrdata->alloc * 2u;
+          if (G_UNLIKELY (alloc < wrdata->len))
+            {
+              if (wrdata->len == G_MAXUINT16)
+                return FALSE;
+              alloc = G_MAXUINT16;
+            }
+          wrdata->list.many = g_renew (GWeakRef *, wrdata->list.many, alloc);
+          wrdata->alloc = alloc;
+        }
+
+      wrdata->list.many[wrdata->len] = weak_ref;
+    }
+
+  wrdata->len++;
+  return TRUE;
+}
+
+static GWeakRef *
+weak_ref_data_list_remove (WeakRefData *wrdata, guint16 idx, gboolean allow_shrink)
+{
+  GWeakRef *weak_ref;
+
+#if G_ENABLE_DEBUG
+  g_assert (idx < wrdata->len);
+#endif
+
+  wrdata->len--;
+
+  if (wrdata->len == 0u)
+    {
+      weak_ref = wrdata->list.one;
+    }
+  else
+    {
+      weak_ref = wrdata->list.many[idx];
+
+      if (wrdata->len == 1u)
+        {
+          GWeakRef *weak_ref2 = wrdata->list.many[idx == 0 ? 1 : 0];
+
+          g_free (wrdata->list.many);
+          wrdata->list.one = weak_ref2;
+        }
+      else
+        {
+          wrdata->list.many[idx] = wrdata->list.many[wrdata->len];
+
+          if (allow_shrink && G_UNLIKELY (wrdata->len <= wrdata->alloc / 4u))
+            {
+              /* Shrink the buffer. When 75% are empty, shrink it to 50%. */
+              if (wrdata->alloc == G_MAXUINT16)
+                wrdata->alloc = ((guint32) G_MAXUINT16 + 1u) / 2u;
+              else
+                wrdata->alloc /= 2u;
+              wrdata->list.many = g_renew (GWeakRef *, wrdata->list.many, wrdata->alloc);
+            }
+        }
+    }
+
+  return weak_ref;
+}
+
 static gboolean
 weak_ref_data_has (GObject *object, WeakRefData *wrdata, WeakRefData **out_new_wrdata)
 {
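The new helpers above keep a single GWeakRef inline (list.one) and switch to a growable array (list.many) only from the second registration on; the array grows by doubling, capped at G_MAXUINT16, and shrinks to half once three quarters of it sit empty. Removal uses the usual swap-with-last trick, which is O(1) but does not preserve order. Below is a hedged, standalone illustration of that removal step in plain C with illustrative names; it is not GLib code.

/* Hedged illustration of the swap-remove used by weak_ref_data_list_remove():
 * the slot at idx is overwritten with the last element, then the length is
 * decremented. Order is not preserved, but no elements are shifted. */
#include <glib.h>

static guint
swap_remove (gint *arr, guint len, guint idx)
{
  g_assert (idx < len);

  arr[idx] = arr[len - 1]; /* move the last element into the hole */
  return len - 1;          /* new length; the last slot is now unused */
}

int
main (void)
{
  gint arr[] = { 10, 20, 30, 40 };
  guint len = G_N_ELEMENTS (arr);
  guint i;

  len = swap_remove (arr, len, 1); /* arr is now { 10, 40, 30 } */
  for (i = 0; i < len; i++)
    g_print ("%d\n", arr[i]);
  return 0;
}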
@@ -5290,12 +5446,14 @@ _weak_ref_unlock_and_set (GWeakRef *weak_ref, GObject *object)
 static void
 weak_ref_data_clear_list (WeakRefData *wrdata, GObject *object)
 {
-  while (wrdata->list)
+  while (wrdata->len > 0u)
     {
-      GWeakRef *weak_ref = wrdata->list->data;
+      GWeakRef *weak_ref;
       gpointer ptr;
 
-      wrdata->list = g_slist_remove (wrdata->list, weak_ref);
+      /* pass "allow_shrink=FALSE", so we don't reallocate needlessly. We
+       * anyway are about to clear the entire list. */
+      weak_ref = weak_ref_data_list_remove (wrdata, wrdata->len - 1u, FALSE);
 
       /* Fast-path. Most likely @weak_ref is currently not locked, so we can
        * just atomically set the pointer to NULL. */
@@ -5407,17 +5565,13 @@ _weak_ref_set (GWeakRef *weak_ref,
 
   if (old_object)
     {
-#if G_ENABLE_DEBUG
-      g_assert (g_slist_find (old_wrdata->list, weak_ref));
-#endif
-      if (!old_wrdata->list)
-        {
-          g_critical ("unexpected missing GWeakRef data");
-        }
-      else
-        {
-          old_wrdata->list = g_slist_remove (old_wrdata->list, weak_ref);
-        }
+      gint32 idx;
+
+      idx = weak_ref_data_list_find (old_wrdata, weak_ref);
+      if (idx < 0)
+        g_critical ("unexpected missing GWeakRef data");
+      else
+        weak_ref_data_list_remove (old_wrdata, idx, TRUE);
     }
 
   weak_ref_data_unlock (old_wrdata);
@@ -5425,7 +5579,7 @@ _weak_ref_set (GWeakRef *weak_ref,
   if (new_object)
     {
 #if G_ENABLE_DEBUG
-      g_assert (!g_slist_find (new_wrdata->list, weak_ref));
+      g_assert (weak_ref_data_list_find (new_wrdata, weak_ref) < 0);
 #endif
       if (g_atomic_int_get (&new_object->ref_count) < 1)
         {
@@ -5434,7 +5588,11 @@ _weak_ref_set (GWeakRef *weak_ref,
         }
       else
         {
-          new_wrdata->list = g_slist_prepend (new_wrdata->list, weak_ref);
+          if (!weak_ref_data_list_add (new_wrdata, weak_ref))
+            {
+              g_critical ("Too many GWeakRef registered");
+              new_object = NULL;
+            }
         }
     }
 
@@ -767,6 +767,59 @@ test_weak_ref_in_toggle_notify (void)
   g_assert_null (g_weak_ref_get (&weak));
 }
 
+static void
+test_weak_ref_many (void)
+{
+  const guint N = g_test_slow ()
+                      ? G_MAXUINT16
+                      : 211;
+  const guint PRIME = 1048583;
+  GObject *obj;
+  GWeakRef *weak_refs;
+  GWeakRef weak_ref1;
+  guint j;
+  guint n;
+  guint i;
+
+  obj = g_object_new (G_TYPE_OBJECT, NULL);
+
+  weak_refs = g_new (GWeakRef, N);
+
+  /* We register them in a somewhat juggled order. That's because below, we will clear them
+   * again, and we don't want to always clear them in the same order as they were registered.
+   * For that, we calculate the actual index by jumping around by adding a prime number. */
+  j = (g_test_rand_int () % (N + 1));
+  for (i = 0; i < N; i++)
+    {
+      j = (j + PRIME) % N;
+      g_weak_ref_init (&weak_refs[j], obj);
+    }
+
+  if (N == G_MAXUINT16)
+    {
+      g_test_expect_message ("GLib-GObject", G_LOG_LEVEL_CRITICAL, "*Too many GWeakRef registered");
+      g_weak_ref_init (&weak_ref1, obj);
+      g_test_assert_expected_messages ();
+      g_assert_null (g_weak_ref_get (&weak_ref1));
+    }
+
+  n = g_test_rand_int () % (N + 1u);
+  for (i = 0; i < N; i++)
+    g_weak_ref_set (&weak_refs[i], i < n ? NULL : obj);
+
+  g_object_unref (obj);
+
+  for (i = 0; i < N; i++)
+    g_assert_null (g_weak_ref_get (&weak_refs[i]));
+
+  /* The API would expect us to also call g_weak_ref_clear() on all references
+   * to clean up. In practice, they are already all NULL, so we don't need
+   * that (it would have no effect, with the current implementation of
+   * GWeakRef). */
+
+  g_free (weak_refs);
+}
+
 /*****************************************************************************/
 
 #define CONCURRENT_N_OBJS 5
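A side note on the test above: the index juggling j = (j + PRIME) % N visits every index exactly once per pass, because the stride is a prime larger than N and therefore shares no factor with N. The following is a hedged standalone check of that property, not part of the test suite.

/* Hedged standalone check: a prime stride larger than N is coprime to N, so
 * j = (j + PRIME) % N enumerates all of 0..N-1 exactly once in N steps. */
#include <glib.h>

int
main (void)
{
  const guint N = 211;
  const guint PRIME = 1048583;
  gboolean *seen = g_new0 (gboolean, N);
  guint j = 0;
  guint i;

  for (i = 0; i < N; i++)
    {
      j = (j + PRIME) % N;
      g_assert (!seen[j]); /* no index is visited twice ... */
      seen[j] = TRUE;
    }
  for (i = 0; i < N; i++)
    g_assert (seen[i]); /* ... and none is skipped */

  g_free (seen);
  return 0;
}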
@@ -1584,6 +1637,7 @@ main (int argc, char **argv)
   g_test_add_func ("/object/weak-ref/on-run-dispose", test_weak_ref_on_run_dispose);
   g_test_add_func ("/object/weak-ref/on-toggle-notify", test_weak_ref_on_toggle_notify);
   g_test_add_func ("/object/weak-ref/in-toggle-notify", test_weak_ref_in_toggle_notify);
+  g_test_add_func ("/object/weak-ref/many", test_weak_ref_many);
   g_test_add_data_func ("/object/weak-ref/concurrent/0", GINT_TO_POINTER (0), test_weak_ref_concurrent);
   g_test_add_data_func ("/object/weak-ref/concurrent/1", GINT_TO_POINTER (1), test_weak_ref_concurrent);
   g_test_add_func ("/object/toggle-ref", test_toggle_ref);