Merge branch 'th/weak-ref-lock-2' into 'main'

[th/weak-ref-lock-2] gobject: use per-object bit-lock instead of global RWLock for GWeakRef

Closes #743

See merge request GNOME/glib!3834
Philip Withnall 2024-01-31 16:51:50 +00:00
commit 2638f97b3e
6 changed files with 758 additions and 182 deletions

View File

@@ -827,6 +827,119 @@ g_datalist_id_remove_no_notify (GData **datalist,
return ret_data;
}
/*< private >
* g_datalist_id_update_atomic:
* @datalist: the data list
* @key_id: the key of the entry to update.
* @callback: (scope call): callback to update (set, remove, steal, update) the
* data.
* @user_data: the user data for @callback.
*
* Will call @callback while holding the lock on @datalist. Be careful not to
* call back into other data-list functions from @callback, because the lock
* is not reentrant and doing so would deadlock.
*
* The callback receives the current data and destroy function. If @key_id is
* currently not in @datalist, both will be %NULL. The callback can update
* those pointers, and @datalist is updated with the result. Note that if the
* callback replaces the received data, it MUST take ownership of the old
* value, for example by freeing it with the provided destroy function.
*
* The point is to access the entry atomically while holding the lock on
* @datalist. Without this, the user would have to hold their own mutex while
* handling the @key_id entry.
*
* The return value of @callback is not interpreted by this function; it
* simply becomes the function's return value. This is an alternative to
* returning a result via @user_data.
*
* Returns: the value returned by @callback.
*
* Since: 2.80
*/
gpointer
g_datalist_id_update_atomic (GData **datalist,
GQuark key_id,
GDataListUpdateAtomicFunc callback,
gpointer user_data)
{
GData *d;
GDataElt *data;
gpointer new_data;
gpointer result;
GDestroyNotify new_destroy;
guint32 idx;
gboolean to_unlock = TRUE;
d = g_datalist_lock_and_get (datalist);
data = datalist_find (d, key_id, &idx);
if (data)
{
new_data = data->data;
new_destroy = data->destroy;
}
else
{
new_data = NULL;
new_destroy = NULL;
}
result = callback (key_id, &new_data, &new_destroy, user_data);
if (data && !new_data)
{
/* Remove. The callback indicated that the entry should be dropped.
*
* The old data->data was already stolen by callback(). */
d->len--;
/* We don't bother to shrink, but if all data are now gone
* we at least free the memory
*/
if (d->len == 0)
{
g_datalist_unlock_and_set (datalist, NULL);
g_free (d);
to_unlock = FALSE;
}
else
{
if (idx != d->len)
*data = d->data[d->len];
}
}
else if (data)
{
/* Update. The callback may have provided new pointers to an existing
* entry.
*
* The old data was stolen by callback(). We only update the pointers and
* are done. */
data->data = new_data;
data->destroy = new_destroy;
}
else if (!data && !new_data)
{
/* Absent. No change. The entry didn't exist and still does not. */
}
else
{
/* Add. Add a new entry that didn't exist previously. */
if (datalist_append (&d, key_id, new_data, new_destroy))
{
g_datalist_unlock_and_set (datalist, d);
to_unlock = FALSE;
}
}
if (to_unlock)
g_datalist_unlock (datalist);
return result;
}
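/* A minimal usage sketch (illustrative, not part of this merge request):
 * lazily create an entry under the datalist lock with
 * g_datalist_id_update_atomic(). The "Counter" type and the quark name are
 * hypothetical, and the direct call assumes code inside GLib itself; external
 * users would go through the _g_datalist_id_update_atomic() wrapper added
 * below. */
typedef struct
{
  int value;
} Counter;

static gpointer
ensure_counter_cb (GQuark key_id,
                   gpointer *data,
                   GDestroyNotify *destroy_notify,
                   gpointer user_data)
{
  Counter *counter = *data;

  if (!counter)
    {
      /* No entry yet: create one. The datalist takes ownership. */
      counter = g_new0 (Counter, 1);
      *data = counter;
      *destroy_notify = g_free;
    }

  /* The pointer is kept as-is, so there is nothing to steal or free here. */
  counter->value++;
  return counter;
}

static Counter *
counter_bump (GData **datalist)
{
  return g_datalist_id_update_atomic (datalist,
                                      g_quark_from_static_string ("my-counter"),
                                      ensure_counter_cb,
                                      NULL);
}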
/**
* g_dataset_id_get_data:
* @dataset_location: (not nullable): the location identifying the dataset.

View File

@@ -38,6 +38,29 @@ G_BEGIN_DECLS
#define G_DATALIST_GET_FLAGS(datalist) \
((gsize) g_atomic_pointer_get (datalist) & G_DATALIST_FLAGS_MASK)
/*< private >
* GDataListUpdateAtomicFunc:
* @key_id: ID of the entry to update
* @data: (inout) (nullable) (not optional): the existing data corresponding
* to @key_id, and return location for the new value for it
* @destroy_notify: (inout) (nullable) (not optional): the existing destroy
* notify function for @data, and return location for the destroy notify
* function for the new value for it
* @user_data: user data passed in to [func@GLib.datalist_id_update_atomic]
*
* Callback from [func@GLib.datalist_id_update_atomic].
*
* Since: 2.80
*/
typedef gpointer (*GDataListUpdateAtomicFunc) (GQuark key_id,
gpointer *data,
GDestroyNotify *destroy_notify,
gpointer user_data);
gpointer g_datalist_id_update_atomic (GData **datalist,
GQuark key_id,
GDataListUpdateAtomicFunc callback,
gpointer user_data);
G_END_DECLS

View File

@@ -24,6 +24,7 @@
#include "glib-private.h"
#include "glib-init.h"
#include "gutilsprivate.h"
#include "gdatasetprivate.h"
#ifdef USE_INVALID_PARAMETER_HANDLER
#include <crtdbg.h>
@@ -74,6 +75,8 @@ glib__private__ (void)
g_uri_get_default_scheme_port,
g_set_prgname_once,
g_datalist_id_update_atomic,
};
return &table;

View File

@@ -23,6 +23,7 @@
#include <glib.h>
#include "gwakeup.h"
#include "gstdioprivate.h"
#include "gdatasetprivate.h"
/* gcc defines __SANITIZE_ADDRESS__, clang sets the address_sanitizer
* feature flag.
@@ -291,6 +292,11 @@ typedef struct {
/* See gutils.c */
gboolean (* g_set_prgname_once) (const gchar *prgname);
gpointer (*g_datalist_id_update_atomic) (GData **datalist,
GQuark key_id,
GDataListUpdateAtomicFunc callback,
gpointer user_data);
/* Add other private functions here, initialize them in glib-private.c */
} GLibPrivateVTable;
@@ -327,4 +333,8 @@ guint g_uint_hash (gconstpointer v);
#undef G_THREAD_LOCAL
#endif
/* Convenience wrapper to call the private g_datalist_id_update_atomic() function. */
#define _g_datalist_id_update_atomic(datalist, key_id, callback, user_data) \
(GLIB_PRIVATE_CALL (g_datalist_id_update_atomic) ((datalist), (key_id), (callback), (user_data)))
#endif /* __GLIB_PRIVATE_H__ */

View File

@@ -1,6 +1,8 @@
#include <glib.h>
#include <stdlib.h>
#include "glib/glib-private.h"
static void
test_quark_basic (void)
{
@@ -319,6 +321,58 @@ test_datalist_id_remove_multiple_destroy_order (void)
g_assert_cmpint (destroy_count, ==, 3);
}
static gpointer
_update_atomic_cb (GQuark key_id,
gpointer *data,
GDestroyNotify *destroy_notify,
gpointer user_data)
{
const char *op = user_data;
char *data_entry = *data;
g_assert_nonnull (op);
if (strcmp (op, "create") == 0)
{
g_assert_cmpstr (data_entry, ==, NULL);
g_assert_null (*destroy_notify);
*data = g_strdup ("hello");
*destroy_notify = g_free;
}
else if (strcmp (op, "remove") == 0)
{
g_assert_cmpstr (data_entry, ==, "hello");
g_assert_true (*destroy_notify == g_free);
g_free (data_entry);
*data = NULL;
}
else
g_assert_not_reached ();
return "result";
}
static void
test_datalist_update_atomic (void)
{
GQuark one = g_quark_from_static_string ("one");
GData *list = NULL;
const char *result;
result = _g_datalist_id_update_atomic (&list, one, _update_atomic_cb, "create");
g_assert_cmpstr (result, ==, "result");
g_assert_cmpstr ((const char *) g_datalist_id_get_data (&list, one), ==, "hello");
g_datalist_id_set_data_full (&list, one, g_strdup ("hello"), g_free);
result = _g_datalist_id_update_atomic (&list, one, _update_atomic_cb, "remove");
g_assert_cmpstr (result, ==, "result");
g_assert_null (list);
}
int
main (int argc, char *argv[])
{
@@ -337,6 +391,7 @@ main (int argc, char *argv[])
g_test_add_func ("/datalist/id-remove-multiple", test_datalist_id_remove_multiple);
g_test_add_func ("/datalist/id-remove-multiple-destroy-order",
test_datalist_id_remove_multiple_destroy_order);
g_test_add_func ("/datalist/update-atomic", test_datalist_update_atomic);
return g_test_run ();
}

View File

@@ -108,6 +108,7 @@ enum {
#define OPTIONAL_FLAG_HAS_SIGNAL_HANDLER (1 << 1) /* Set if object ever had a signal handler */
#define OPTIONAL_FLAG_HAS_NOTIFY_HANDLER (1 << 2) /* Same, specifically for "notify" */
#define OPTIONAL_FLAG_LOCK (1 << 3) /* _OPTIONAL_BIT_LOCK */
#define OPTIONAL_FLAG_EVER_HAD_WEAK_REF (1 << 4) /* Whether g_weak_ref_set() was ever called on the object. */
/* We use g_bit_lock(), which only supports one lock per integer.
*
@@ -206,10 +207,11 @@ static void g_object_dispatch_properties_changed (GObject *object,
GParamSpec **pspecs);
static guint object_floating_flag_handler (GObject *object,
gint job);
static inline void object_set_optional_flags (GObject *object,
guint flags);
static void object_interface_check_properties (gpointer check_data,
gpointer g_iface);
static void weak_locations_free_unlocked (GSList **weak_locations);
/* --- typedefs --- */
typedef struct _GObjectNotifyQueue GObjectNotifyQueue;
@@ -229,9 +231,7 @@ static GQuark quark_notify_queue;
static GParamSpecPool *pspec_pool = NULL;
static gulong gobject_signals[LAST_SIGNAL] = { 0, };
static guint (*floating_flag_handler) (GObject*, gint) = object_floating_flag_handler;
/* qdata pointing to GSList<GWeakRef *>, protected by weak_locations_lock */
static GQuark quark_weak_locations = 0;
static GRWLock weak_locations_lock;
#if HAVE_PRIVATE
G_ALWAYS_INLINE static inline GObjectPrivate *
@@ -251,6 +251,209 @@ object_get_optional_flags_p (GObject *object)
#endif
}
/*****************************************************************************/
/* For GWeakRef, we need a per-object lock. However, in various cases
* we cannot take a strong reference on the object to keep it alive. So the
* mutex cannot live in the object itself, because by the time we want to
* release the lock, we may no longer be able to access the object.
*
* Instead, the mutex is on the WeakRefData, which is itself ref-counted
* and has a lifetime separate from the object. */
typedef struct
{
gatomicrefcount ref_count;
gint lock_flags; /* (atomic) */
GSList *list; /* (element-type GWeakRef) (owned) */
} WeakRefData;
static void weak_ref_data_clear_list (WeakRefData *wrdata, GObject *object);
static WeakRefData *
weak_ref_data_ref (WeakRefData *wrdata)
{
#if G_ENABLE_DEBUG
g_assert (wrdata);
#endif
g_atomic_ref_count_inc (&wrdata->ref_count);
return wrdata;
}
static void
weak_ref_data_unref (WeakRefData *wrdata)
{
if (!wrdata)
return;
if (!g_atomic_ref_count_dec (&wrdata->ref_count))
return;
#if G_ENABLE_DEBUG
/* We expect that the list of weak locations is empty at this point.
* During g_object_unref() (_object_unref_clear_weak_locations()) it
* should have been cleared.
*
* Calling weak_ref_data_clear_list() should be unnecessary. */
g_assert (!wrdata->list);
#endif
g_free_sized (wrdata, sizeof (WeakRefData));
}
static void
weak_ref_data_lock (WeakRefData *wrdata)
{
/* Note that while holding a _weak_ref_lock() on the @weak_ref, we MUST NOT
* acquire a weak_ref_data_lock() on the @wrdata. The locks must be taken in
* the other order: @wrdata first, then @weak_ref. */
if (wrdata)
g_bit_lock (&wrdata->lock_flags, 0);
}
static void
weak_ref_data_unlock (WeakRefData *wrdata)
{
if (wrdata)
g_bit_unlock (&wrdata->lock_flags, 0);
}
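/* A standalone sketch (illustrative, not part of this merge request) of the
 * g_bit_lock() pattern used for WeakRefData above: bit 0 of a plain gint
 * doubles as the lock, so no separate mutex field is needed per structure.
 * The "LockedBox" type is hypothetical. */
typedef struct
{
  gint lock_flags; /* (atomic), bit 0 is the lock */
  guint payload;
} LockedBox;

static void
locked_box_bump (LockedBox *box)
{
  g_bit_lock (&box->lock_flags, 0); /* blocks until bit 0 can be acquired */
  box->payload++;                   /* protected region */
  g_bit_unlock (&box->lock_flags, 0);
}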
static gpointer
weak_ref_data_get_or_create_cb (GQuark key_id,
gpointer *data,
GDestroyNotify *destroy_notify,
gpointer user_data)
{
WeakRefData *wrdata = *data;
GObject *object = user_data;
if (!wrdata)
{
wrdata = g_new (WeakRefData, 1);
*wrdata = (WeakRefData){
/* The initial ref-count is 1. This one is owned by the GData until the
* object gets destroyed. */
.ref_count = 1,
.lock_flags = 0,
.list = NULL,
};
*data = wrdata;
*destroy_notify = (GDestroyNotify) weak_ref_data_unref;
/* Mark @object as having been involved with GWeakRef. This flag sticks
* until @object gets destroyed, just like the WeakRefData, which also
* won't be freed for the remainder of the object's life. */
object_set_optional_flags (object, OPTIONAL_FLAG_EVER_HAD_WEAK_REF);
}
return wrdata;
}
static WeakRefData *
weak_ref_data_get_or_create (GObject *object)
{
if (!object)
return NULL;
return _g_datalist_id_update_atomic (&object->qdata,
quark_weak_locations,
weak_ref_data_get_or_create_cb,
object);
}
static WeakRefData *
weak_ref_data_get (GObject *object)
{
return g_datalist_id_get_data (&object->qdata, quark_weak_locations);
}
static WeakRefData *
weak_ref_data_get_surely (GObject *object)
{
WeakRefData *wrdata;
/* The "surely" part means that we expect @object to already have a WeakRefData.
*
* Note that once a GObject gets a WeakRefData (during g_weak_ref_set() and
* weak_ref_data_get_or_create()), it sticks and is not freed until the
* object gets destroyed.
*
* Maybe we could release the unused WeakRefData in g_weak_ref_set(), but
* then we would always need to take a reference during weak_ref_data_get().
* That is likely not worth it. */
wrdata = weak_ref_data_get (object);
#if G_ENABLE_DEBUG
g_assert (wrdata);
#endif
return wrdata;
}
static gboolean
weak_ref_data_has (GObject *object, WeakRefData *wrdata, WeakRefData **out_new_wrdata)
{
WeakRefData *wrdata2;
/* Check whether @object has @wrdata as its WeakRefData. Note that a GObject's
* WeakRefData never changes once allocated (until the object is destroyed).
*
* So if you hold a reference to a @wrdata, you can check whether @object is
* still the same object that the @wrdata was originally obtained from.
*
* You could not do this check via pointer equality of the GObject pointers
* when you cannot hold strong references on the objects involved, because the
* object pointer might be dangling (the object might even have been destroyed
* and another object created at the same memory location).
*
* In short, weak_ref_data_has() compares two GObject pointers for identity
* when we cannot hold a strong reference on both: we took a reference on the
* @wrdata earlier and compare that instead.
*/
if (!object)
{
/* If @object is NULL, then it trivially has a NULL @wrdata, and we return
* TRUE in that case. That's a convenient special case for some callers.
*
* In other words, weak_ref_data_has(NULL, NULL, out_new_wrdata) is TRUE.
*/
#if G_ENABLE_DEBUG
g_assert (!out_new_wrdata);
#endif
return !wrdata;
}
if (!wrdata)
{
/* We only call this function with an @object that was previously
* registered with a GWeakRef.
*
* That means our @object will have a wrdata, and the result of the
* evaluation will be %FALSE. */
if (out_new_wrdata)
*out_new_wrdata = weak_ref_data_ref (weak_ref_data_get (object));
#if G_ENABLE_DEBUG
g_assert (out_new_wrdata
? *out_new_wrdata
: weak_ref_data_get (object));
#endif
return FALSE;
}
wrdata2 = weak_ref_data_get_surely (object);
if (wrdata == wrdata2)
{
if (out_new_wrdata)
*out_new_wrdata = NULL;
return TRUE;
}
if (out_new_wrdata)
*out_new_wrdata = weak_ref_data_ref (wrdata2);
return FALSE;
}
/*****************************************************************************/
#if defined(G_ENABLE_DEBUG) && defined(G_THREAD_LOCAL)
/* Using this thread-local global is sufficient to guard the per-object
* locking, because while the current thread holds a lock on one object, it
@@ -1469,14 +1672,25 @@ g_object_dispatch_properties_changed (GObject *object,
void
g_object_run_dispose (GObject *object)
{
WeakRefData *wrdata;
g_return_if_fail (G_IS_OBJECT (object));
g_return_if_fail (g_atomic_int_get (&object->ref_count) > 0);
g_object_ref (object);
TRACE (GOBJECT_OBJECT_DISPOSE(object,G_TYPE_FROM_INSTANCE(object), 0));
G_OBJECT_GET_CLASS (object)->dispose (object);
TRACE (GOBJECT_OBJECT_DISPOSE_END(object,G_TYPE_FROM_INSTANCE(object), 0));
g_datalist_id_remove_data (&object->qdata, quark_weak_locations);
if ((object_get_optional_flags (object) & OPTIONAL_FLAG_EVER_HAD_WEAK_REF))
{
wrdata = weak_ref_data_get_surely (object);
weak_ref_data_lock (wrdata);
weak_ref_data_clear_list (wrdata, object);
weak_ref_data_unlock (wrdata);
}
g_object_unref (object);
}
@@ -3839,77 +4053,56 @@ gpointer
static gboolean
_object_unref_clear_weak_locations (GObject *object, gint *p_old_ref, gboolean do_unref)
{
GSList **weak_locations;
WeakRefData *wrdata;
gboolean success;
/* Fast path, for objects that never had a GWeakRef registered. */
if (!(object_get_optional_flags (object) & OPTIONAL_FLAG_EVER_HAD_WEAK_REF))
{
/* The caller just checked atomically that the ref count was one.
*
* And at this point, @object still has never had a GWeakRef registered.
* That means nobody else holds a strong reference, and nobody can use a
* weak reference to race against us and obtain another strong reference.
* We are good to proceed. */
if (do_unref)
{
if (!g_atomic_int_compare_and_exchange ((gint *) &object->ref_count, 1, 0))
{
#if G_ENABLE_DEBUG
g_assert_not_reached ();
#endif
}
}
return TRUE;
}
/* Slow path. We must obtain a lock on the @wrdata, to atomically release
* weak references and check that the ref count is as expected. */
wrdata = weak_ref_data_get_surely (object);
weak_ref_data_lock (wrdata);
if (do_unref)
{
gboolean unreffed = FALSE;
/* Fast path for the final unref using a read-lock only. We check whether
* we have weak_locations and drop the ref count to zero under a reader lock. */
g_rw_lock_reader_lock (&weak_locations_lock);
weak_locations = g_datalist_id_get_data (&object->qdata, quark_weak_locations);
if (!weak_locations)
{
unreffed = g_atomic_int_compare_and_exchange_full ((int *) &object->ref_count,
1, 0,
p_old_ref);
g_rw_lock_reader_unlock (&weak_locations_lock);
return unreffed;
}
g_rw_lock_reader_unlock (&weak_locations_lock);
/* We have weak-locations. Note that we are here already after dispose(). That
* means, during dispose a GWeakRef was registered (very unusual). */
g_rw_lock_writer_lock (&weak_locations_lock);
if (!g_atomic_int_compare_and_exchange_full ((int *) &object->ref_count,
1, 0,
p_old_ref))
{
g_rw_lock_writer_unlock (&weak_locations_lock);
return FALSE;
}
weak_locations = g_datalist_id_remove_no_notify (&object->qdata, quark_weak_locations);
g_clear_pointer (&weak_locations, weak_locations_free_unlocked);
g_rw_lock_writer_unlock (&weak_locations_lock);
return TRUE;
success = g_atomic_int_compare_and_exchange_full ((gint *) &object->ref_count,
1, 0,
p_old_ref);
}
weak_locations = g_datalist_id_get_data (&object->qdata, quark_weak_locations);
if (weak_locations != NULL)
else
{
g_rw_lock_writer_lock (&weak_locations_lock);
*p_old_ref = g_atomic_int_get (&object->ref_count);
if (*p_old_ref != 1)
{
g_rw_lock_writer_unlock (&weak_locations_lock);
return FALSE;
}
weak_locations = g_datalist_id_remove_no_notify (&object->qdata, quark_weak_locations);
g_clear_pointer (&weak_locations, weak_locations_free_unlocked);
g_rw_lock_writer_unlock (&weak_locations_lock);
return TRUE;
*p_old_ref = g_atomic_int_get ((gint *) &object->ref_count);
success = (*p_old_ref == 1);
}
/* We don't need to re-fetch p_old_ref or check that it's still 1. The caller
* did that already. We are good.
*
* Note that in this case we fetched old_ref and weak_locations separately,
* without a lock. But this is fine. We are still before calling dispose().
* If there is a race at this point, the same race can happen between
* _object_unref_clear_weak_locations() and dispose() call. That is handled
* just fine. */
return TRUE;
if (success)
weak_ref_data_clear_list (wrdata, object);
weak_ref_data_unlock (wrdata);
return success;
}
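/* A sketch (illustrative, not part of this merge request) of the "drop to
 * zero only if the count is still exactly 1" step used above, in isolation.
 * If another thread took a reference in the meantime, the exchange fails and
 * the observed value is reported back so the caller can retry. The helper
 * name is hypothetical. */
static gboolean
try_final_unref (gint *ref_count, gint *out_observed)
{
  return g_atomic_int_compare_and_exchange_full (ref_count, 1, 0, out_observed);
}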
/**
@@ -4032,6 +4225,10 @@ retry_beginning:
G_OBJECT_GET_CLASS (object)->dispose (object);
TRACE (GOBJECT_OBJECT_DISPOSE_END (object, G_TYPE_FROM_INSTANCE (object), 1));
/* Must re-fetch old-ref. _object_unref_clear_weak_locations() relies on
* that. */
old_ref = g_atomic_int_get (&object->ref_count);
retry_decrement:
/* Here, old_ref is 1 if we just come from dispose(). If the object was resurrected,
* we can hit `goto retry_decrement` and be here with a larger old_ref. */
@@ -4042,6 +4239,15 @@ retry_decrement:
* queue. */
g_object_notify_queue_thaw (object, nqueue, FALSE);
nqueue = NULL;
/* Note at this point, @old_ref might be wrong.
*
* Also note that _object_unref_clear_weak_locations() requires that we
* atomically checked that @old_ref is 1. However, as @old_ref is larger
* than 1, it will not be called. Instead, all other code paths below
* handle the possibility of a bogus @old_ref.
*
* No need to re-fetch. */
}
if (old_ref > 2)
@@ -4080,9 +4286,8 @@ retry_decrement:
return;
}
/* old_ref is 1, we are about to drop the reference count to zero. That is
* done by _object_unref_clear_weak_locations() under a weak_locations_lock
* so that there is no race with g_weak_ref_set(). */
/* old_ref is (atomically!) checked to be 1, we are about to drop the
* reference count to zero in _object_unref_clear_weak_locations(). */
if (!_object_unref_clear_weak_locations (object, &old_ref, TRUE))
goto retry_decrement;
@@ -5044,6 +5249,201 @@ g_initially_unowned_class_init (GInitiallyUnownedClass *klass)
* without first having or creating a strong reference to the object.
*/
#define WEAK_REF_LOCK_BIT 0
static GObject *
_weak_ref_clean_pointer (gpointer ptr)
{
/* Drop the lock bit WEAK_REF_LOCK_BIT from @ptr (if set). */
return g_pointer_bit_lock_mask_ptr (ptr, WEAK_REF_LOCK_BIT, FALSE, 0, NULL);
}
static void
_weak_ref_lock (GWeakRef *weak_ref, GObject **out_object)
{
/* Note that while holding a _weak_ref_lock() on the @weak_ref, we MUST NOT
* acquire a weak_ref_data_lock() on the @wrdata. The locks must be taken in
* the other order: @wrdata first, then @weak_ref. */
if (out_object)
{
guintptr ptr;
g_pointer_bit_lock_and_get (&weak_ref->priv.p, WEAK_REF_LOCK_BIT, &ptr);
*out_object = _weak_ref_clean_pointer ((gpointer) ptr);
}
else
g_pointer_bit_lock (&weak_ref->priv.p, WEAK_REF_LOCK_BIT);
}
static void
_weak_ref_unlock (GWeakRef *weak_ref)
{
g_pointer_bit_unlock (&weak_ref->priv.p, WEAK_REF_LOCK_BIT);
}
static void
_weak_ref_unlock_and_set (GWeakRef *weak_ref, GObject *object)
{
g_pointer_bit_unlock_and_set (&weak_ref->priv.p, WEAK_REF_LOCK_BIT, object, 0);
}
static void
weak_ref_data_clear_list (WeakRefData *wrdata, GObject *object)
{
while (wrdata->list)
{
GWeakRef *weak_ref = wrdata->list->data;
gpointer ptr;
wrdata->list = g_slist_remove (wrdata->list, weak_ref);
/* Fast-path. Most likely @weak_ref is currently not locked, so we can
* just atomically set the pointer to NULL. */
ptr = g_atomic_pointer_get (&weak_ref->priv.p);
#if G_ENABLE_DEBUG
g_assert (G_IS_OBJECT (_weak_ref_clean_pointer (ptr)));
g_assert (!object || object == _weak_ref_clean_pointer (ptr));
#endif
if (G_LIKELY (ptr == _weak_ref_clean_pointer (ptr)))
{
/* The pointer is unlocked. Try an atomic compare-and-exchange... */
if (g_atomic_pointer_compare_and_exchange (&weak_ref->priv.p, ptr, NULL))
{
/* Done. Go to the next. */
continue;
}
}
/* The @weak_ref is locked. Acquire the lock to set the pointer to NULL. */
_weak_ref_lock (weak_ref, NULL);
_weak_ref_unlock_and_set (weak_ref, NULL);
}
}
static void
_weak_ref_set (GWeakRef *weak_ref,
GObject *new_object,
gboolean called_by_init)
{
WeakRefData *old_wrdata;
WeakRefData *new_wrdata;
GObject *old_object;
new_wrdata = weak_ref_data_get_or_create (new_object);
#if G_ENABLE_DEBUG
g_assert (!new_object || object_get_optional_flags (new_object) & OPTIONAL_FLAG_EVER_HAD_WEAK_REF);
#endif
if (called_by_init)
{
/* The caller is g_weak_ref_init(). We know that the weak_ref should be
* NULL. We thus set @old_wrdata to NULL without checking.
*
* Also important, the caller ensured that @new_object is not NULL. So we
* are expected to set @weak_ref from NULL to a non-NULL @new_object. */
old_wrdata = NULL;
#if G_ENABLE_DEBUG
g_assert (new_object);
#endif
}
else
{
/* We must get a wrdata object @old_wrdata for the current @old_object. */
_weak_ref_lock (weak_ref, &old_object);
if (old_object == new_object)
{
/* Already set. We are done. */
_weak_ref_unlock (weak_ref);
return;
}
old_wrdata = old_object
? weak_ref_data_ref (weak_ref_data_get (old_object))
: NULL;
_weak_ref_unlock (weak_ref);
}
/* We need a lock on @old_wrdata, @new_wrdata and @weak_ref. We need to take
* these locks in a certain order to avoid deadlock. We sort them by pointer
* value.
*
* Note that @old_wrdata or @new_wrdata may be NULL, which is handled
* correctly.
*
* Note that @old_wrdata and @new_wrdata are never identical at this point.
*/
if (new_wrdata && old_wrdata && (((guintptr) (gpointer) old_wrdata) < ((guintptr) ((gpointer) new_wrdata))))
{
weak_ref_data_lock (old_wrdata);
weak_ref_data_lock (new_wrdata);
}
else
{
weak_ref_data_lock (new_wrdata);
weak_ref_data_lock (old_wrdata);
}
_weak_ref_lock (weak_ref, &old_object);
if (!weak_ref_data_has (old_object, old_wrdata, NULL))
{
/* A race. @old_object no longer has the expected @old_wrdata after
* getting all the locks. */
if (old_object)
{
/* We lost the race and found a different object set. That's fine: our
* update counts as having been lost in the race, and we are done. No
* need to retry. */
weak_ref_data_unlock (old_wrdata);
weak_ref_data_unlock (new_wrdata);
_weak_ref_unlock (weak_ref);
weak_ref_data_unref (old_wrdata);
return;
}
/* @old_object is NULL after a race. We didn't expect that, but it's
* fine. Proceed to set @new_object... */
}
if (old_object)
{
#if G_ENABLE_DEBUG
g_assert (g_slist_find (old_wrdata->list, weak_ref));
#endif
if (!old_wrdata->list)
{
g_critical ("unexpected missing GWeakRef data");
}
else
{
old_wrdata->list = g_slist_remove (old_wrdata->list, weak_ref);
}
}
weak_ref_data_unlock (old_wrdata);
if (new_object)
{
#if G_ENABLE_DEBUG
g_assert (!g_slist_find (new_wrdata->list, weak_ref));
#endif
if (g_atomic_int_get (&new_object->ref_count) < 1)
{
g_critical ("calling g_weak_ref_set() with already destroyed object");
new_object = NULL;
}
else
{
new_wrdata->list = g_slist_prepend (new_wrdata->list, weak_ref);
}
}
_weak_ref_unlock_and_set (weak_ref, new_object);
weak_ref_data_unlock (new_wrdata);
weak_ref_data_unref (old_wrdata);
}
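/* A sketch (illustrative, not part of this merge request) of the
 * address-ordered locking discipline that _weak_ref_set() applies to the two
 * WeakRefData locks above, shown with plain GMutex for clarity: whichever
 * lock has the lower address is taken first, so two threads locking the same
 * pair can never deadlock. */
static void
lock_pair_in_address_order (GMutex *a, GMutex *b)
{
  if ((guintptr) a < (guintptr) b)
    {
      g_mutex_lock (a);
      g_mutex_lock (b);
    }
  else
    {
      g_mutex_lock (b);
      g_mutex_lock (a);
    }
}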
/**
* g_weak_ref_init: (skip)
* @weak_ref: (inout): uninitialized or empty location for a weak
@@ -5064,12 +5464,19 @@ g_initially_unowned_class_init (GInitiallyUnownedClass *klass)
*/
void
g_weak_ref_init (GWeakRef *weak_ref,
gpointer object)
gpointer object)
{
weak_ref->priv.p = NULL;
g_return_if_fail (weak_ref);
g_return_if_fail (object == NULL || G_IS_OBJECT (object));
g_atomic_pointer_set (&weak_ref->priv.p, NULL);
if (object)
g_weak_ref_set (weak_ref, object);
{
/* We pass a hint that the weak_ref is currently NULL. Unlike
* g_weak_ref_set(), we then don't need to take the extra lock just to
* find out that there is no old object. */
_weak_ref_set (weak_ref, object, TRUE);
}
}
/**
@@ -5116,20 +5523,86 @@ g_weak_ref_clear (GWeakRef *weak_ref)
gpointer
g_weak_ref_get (GWeakRef *weak_ref)
{
WeakRefData *wrdata;
WeakRefData *new_wrdata;
GToggleNotify toggle_notify = NULL;
gpointer toggle_data = NULL;
GObject *object;
g_return_val_if_fail (weak_ref, NULL);
g_rw_lock_reader_lock (&weak_locations_lock);
/* We cannot take the strong reference on @object yet. Otherwise,
* _object_unref_clear_weak_locations() might have just taken the lock on
* @wrdata, seen that the ref count is 1, and decided to proceed clearing the
* weak locations. If we took a strong reference here, the object would be
* alive and well, but _object_unref_clear_weak_locations() would still go
* ahead and clear the @weak_ref.
*
* We avoid that by only taking the strong reference while holding the lock
* on @wrdata, so we stay in sync with _object_unref_clear_weak_locations().
*
* But first we must get a reference to the @wrdata.
*/
_weak_ref_lock (weak_ref, &object);
wrdata = object
? weak_ref_data_ref (weak_ref_data_get (object))
: NULL;
_weak_ref_unlock (weak_ref);
object = weak_ref->priv.p;
if (!wrdata)
{
/* There is no @wrdata and no object. We are done. */
return NULL;
}
if (object)
object = object_ref (object, &toggle_notify, &toggle_data);
retry:
g_rw_lock_reader_unlock (&weak_locations_lock);
/* Now proceed to get the strong reference. This time we acquire a lock on
* the per-object @wrdata and on @weak_ref.
*
* As the order in which locks are taken is important, we previously only
* took a _weak_ref_lock() to obtain the @wrdata. Now we have to lock the
* @wrdata first, and then the @weak_ref again. */
weak_ref_data_lock (wrdata);
_weak_ref_lock (weak_ref, &object);
if (!object)
{
/* Object is gone in the meantime. That is fine. */
new_wrdata = NULL;
}
else
{
/* Check that @object still refers to the same object as before. We do
* that by comparing the @wrdata object. A GObject keeps its (unique!)
* wrdata instance until it is destroyed, and since our @wrdata is still
* alive, @object is the same as before if and only if its @wrdata is the same.
*/
if (weak_ref_data_has (object, wrdata, &new_wrdata))
{
/* We are (still) good. Take a strong ref while holding the necessary locks. */
object = object_ref (object, &toggle_notify, &toggle_data);
}
else
{
/* The @object changed and no longer has the same @wrdata. In this
* case, we need to start over.
*
* Note that @new_wrdata references the wrdata of the now-current
* @object. We will use that during the retry. */
}
_weak_ref_unlock (weak_ref);
weak_ref_data_unlock (wrdata);
weak_ref_data_unref (wrdata);
if (new_wrdata)
{
/* There was a race. The object changed. Retry, with @new_wrdata. */
wrdata = new_wrdata;
goto retry;
}
if (toggle_notify)
toggle_notify (toggle_data, object, FALSE);
@@ -5137,35 +5610,6 @@ g_weak_ref_get (GWeakRef *weak_ref)
return object;
}
static void
weak_locations_free_unlocked (GSList **weak_locations)
{
if (*weak_locations)
{
GSList *weak_location;
for (weak_location = *weak_locations; weak_location;)
{
GWeakRef *weak_ref_location = weak_location->data;
weak_ref_location->priv.p = NULL;
weak_location = g_slist_delete_link (weak_location, weak_location);
}
}
g_free (weak_locations);
}
static void
weak_locations_free (gpointer data)
{
GSList **weak_locations = data;
g_rw_lock_writer_lock (&weak_locations_lock);
weak_locations_free_unlocked (weak_locations);
g_rw_lock_writer_unlock (&weak_locations_lock);
}
/**
* g_weak_ref_set: (skip)
* @weak_ref: location for a weak reference
@@ -5181,82 +5625,10 @@ weak_locations_free (gpointer data)
*/
void
g_weak_ref_set (GWeakRef *weak_ref,
gpointer object)
gpointer object)
{
GSList **weak_locations;
GObject *new_object;
GObject *old_object;
g_return_if_fail (weak_ref != NULL);
g_return_if_fail (object == NULL || G_IS_OBJECT (object));
new_object = object;
g_rw_lock_writer_lock (&weak_locations_lock);
/* We use the extra level of indirection here so that if we have ever
* had a weak pointer installed at any point in time on this object,
* we can see that there is a non-NULL value associated with the
* weak-pointer quark and know that this value will not change at any
* point in the object's lifetime.
*
* Both properties are important for reducing the amount of times we
* need to acquire locks and for decreasing the duration of time the
* lock is held while avoiding some rather tricky races.
*
* Specifically: we can avoid having to do an extra unconditional lock
* in g_object_unref() without worrying about some extremely tricky
* races.
*/
old_object = weak_ref->priv.p;
if (new_object != old_object)
{
weak_ref->priv.p = new_object;
/* Remove the weak ref from the old object */
if (old_object != NULL)
{
weak_locations = g_datalist_id_get_data (&old_object->qdata, quark_weak_locations);
if (weak_locations == NULL)
{
g_critical ("unexpected missing GWeakRef");
}
else
{
*weak_locations = g_slist_remove (*weak_locations, weak_ref);
if (!*weak_locations)
{
weak_locations_free_unlocked (weak_locations);
g_datalist_id_remove_no_notify (&old_object->qdata, quark_weak_locations);
}
}
}
/* Add the weak ref to the new object */
if (new_object != NULL)
{
if (g_atomic_int_get (&new_object->ref_count) < 1)
{
weak_ref->priv.p = NULL;
g_rw_lock_writer_unlock (&weak_locations_lock);
g_critical ("calling g_weak_ref_set() with already destroyed object");
return;
}
weak_locations = g_datalist_id_get_data (&new_object->qdata, quark_weak_locations);
if (weak_locations == NULL)
{
weak_locations = g_new0 (GSList *, 1);
g_datalist_id_set_data_full (&new_object->qdata, quark_weak_locations,
weak_locations, weak_locations_free);
}
*weak_locations = g_slist_prepend (*weak_locations, weak_ref);
}
}
g_rw_lock_writer_unlock (&weak_locations_lock);
_weak_ref_set (weak_ref, object, FALSE);
}
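/* A sketch (illustrative, not part of this merge request) of typical use of
 * the public GWeakRef API whose implementation the hunks above rework. The
 * "Watcher" type is hypothetical. */
typedef struct
{
  GWeakRef target; /* does not keep the object alive */
} Watcher;

static void
watcher_init (Watcher *watcher, GObject *object)
{
  g_weak_ref_init (&watcher->target, object);
}

static void
watcher_poke (Watcher *watcher)
{
  GObject *object = g_weak_ref_get (&watcher->target); /* strong ref or NULL */

  if (!object)
    return; /* the object was already disposed */

  /* ... use @object ... */
  g_object_unref (object);
}

static void
watcher_clear (Watcher *watcher)
{
  g_weak_ref_clear (&watcher->target);
}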