WIP: gobject: use per-object bit-lock instead of global RWLock for GWeakRef

WIP: tests still failing.

Replace the global RWLock with per-object locking.

g_object_unref() needs to take a lock for GWeakRef while decrementing
the reference count to zero. That is the case even when no weak refs
are actually registered, because we cannot know that in a race-free
manner without taking the lock. Replace the global RWLock with a
per-object bit lock.
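
To illustrate: without holding the lock across both steps, a bad
interleaving is possible (a hypothetical sketch, not code from this
patch):

    /* Thread A (g_object_unref)        Thread B (g_weak_ref_set)
     *
     * sees no weak refs registered
     *                                   registers a GWeakRef on object
     * decrements ref_count 1 -> 0
     * finalizes the object
     *                                   the GWeakRef now points to
     *                                   freed memory
     *
     * Checking for weak refs and decrementing the ref count to zero must
     * therefore happen under the same per-object lock. */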

Now there are actually two locks: the per-object lock
OPTIONAL2_BIT_LOCK_G_WEAK_REF and a bit lock WEAK_REF_LOCK on
`&weak_ref->priv.p`.

The object lock OPTIONAL2_BIT_LOCK_G_WEAK_REF must always be taken
first; we cannot take it while already holding a WEAK_REF_LOCK. This
ordering prevents deadlocks.
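
In code, the resulting discipline looks like this (a minimal sketch
using the helpers introduced by this patch; error handling omitted):

    /* Correct nesting: per-object lock first, then the weak-ref bit lock. */
    object_bit_lock2 (object, OPTIONAL2_BIT_LOCK_G_WEAK_REF);
    _weak_ref_lock (weak_ref, &old_object);
    /* ... update the object's quark_weak_locations list ... */
    _weak_ref_unlock (weak_ref);
    object_bit_unlock2 (object, OPTIONAL2_BIT_LOCK_G_WEAK_REF);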

Downsides:

- this requires growing the GObject size (on 64-bit) to add a
  GObjectPrivate for optional2_flags. We cannot use the bit lock
  on optional_flags, because while holding _weak_ref_lock() we
  need to be able to call g_object_ref(), and g_object_ref() locks on
  OPTIONAL_BIT_LOCK_TOGGLE_REFS. If OPTIONAL2_BIT_LOCK_G_WEAK_REF were on
  the same "optional_flags" as OPTIONAL_BIT_LOCK_TOGGLE_REFS, we would
  have cases where we take

      OPTIONAL2_BIT_LOCK_G_WEAK_REF && _weak_ref_lock()

  and cases where we take

      _weak_ref_lock() && OPTIONAL_BIT_LOCK_TOGGLE_REFS

  That would deadlock. We need distinct locks for that, and thus another
  int field in GObjectPrivate for the bit lock OPTIONAL2_BIT_LOCK_G_WEAK_REF.

- now `g_weak_ref_set()` also temporarily raises the ref count of the
  old object. That is because we must keep the old object alive while
  taking an OPTIONAL2_BIT_LOCK_G_WEAK_REF lock on it. Taking and
  dropping references can emit toggle notifications, which is a visible
  change in behavior (illustrated below).
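
The change can be observed with a toggle reference. A minimal sketch
(hypothetical callback name; assumes the toggle ref is the old
object's only remaining strong reference):

    #include <glib-object.h>

    static void
    toggle_notify (gpointer data, GObject *object, gboolean is_last_ref)
    {
      /* Previously, retargeting a GWeakRef never invoked this callback.
       * Now g_weak_ref_set() briefly refs the old object (1 -> 2) and
       * unrefs it again (2 -> 1), which is observable here. */
      g_print ("toggle: is_last_ref=%d\n", is_last_ref);
    }

    int
    main (void)
    {
      GObject *obj = g_object_new (G_TYPE_OBJECT, NULL);
      GWeakRef weak;

      g_object_add_toggle_ref (obj, toggle_notify, NULL);
      g_object_unref (obj); /* the toggle ref is now the only strong ref */

      g_weak_ref_init (&weak, obj);
      g_weak_ref_set (&weak, NULL); /* may now emit toggle notifications */

      g_object_remove_toggle_ref (obj, toggle_notify, NULL);
      return 0;
    }

With the global RWLock, g_weak_ref_set() never touched the old object's
ref count, so no notifications were emitted here.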
Thomas Haller 2023-12-20 09:55:32 +01:00
parent f70b3a91d3
commit dcd6c000ed

@@ -109,6 +109,10 @@ enum {
#define OPTIONAL_FLAG_HAS_NOTIFY_HANDLER (1 << 2) /* Same, specifically for "notify" */
#define OPTIONAL_FLAG_LOCK (1 << 3) /* _OPTIONAL_BIT_LOCK */
#define _OPTIONAL2_BIT_LOCK 0
#define OPTIONAL2_FLAG_LOCK (1 << 0) /* _OPTIONAL2_BIT_LOCK */
/* We use g_bit_lock(), which only supports one lock per integer.
*
* Hence, while we have locks for different purposes, internally they all
@@ -128,26 +132,24 @@ enum {
#define OPTIONAL_BIT_LOCK_TOGGLE_REFS 3
#define OPTIONAL_BIT_LOCK_CLOSURE_ARRAY 4
/* These lock types are for optional2_flags and object_bit_lock2(). */
#define OPTIONAL2_BIT_LOCK_G_WEAK_REF 1
#if SIZEOF_INT == 4 && GLIB_SIZEOF_VOID_P >= 8
#define HAVE_OPTIONAL_FLAGS_IN_GOBJECT 1
#else
#define HAVE_OPTIONAL_FLAGS_IN_GOBJECT 0
#endif
/* For now we only create a private struct if we don't have optional flags in
* GObject. Currently we don't need it otherwise. In the future we might
* always add a private struct. */
#define HAVE_PRIVATE (!HAVE_OPTIONAL_FLAGS_IN_GOBJECT)
#if HAVE_PRIVATE
typedef struct {
typedef struct
{
#if !HAVE_OPTIONAL_FLAGS_IN_GOBJECT
guint optional_flags; /* (atomic) */
#endif
guint optional2_flags; /* (atomic) */
} GObjectPrivate;
static int GObject_private_offset;
#endif
typedef struct
{
@@ -165,6 +167,10 @@ G_STATIC_ASSERT(sizeof(GObject) == sizeof(GObjectReal));
G_STATIC_ASSERT(G_STRUCT_OFFSET(GObject, ref_count) == G_STRUCT_OFFSET(GObjectReal, ref_count));
G_STATIC_ASSERT(G_STRUCT_OFFSET(GObject, qdata) == G_STRUCT_OFFSET(GObjectReal, qdata));
typedef struct
{
GSList *list;
} GWeakRefList;
/* --- prototypes --- */
static void g_object_base_class_init (GObjectClass *class);
@@ -209,7 +215,6 @@ static guint object_floating_flag_handler (GObject *object,
static void object_interface_check_properties (gpointer check_data,
gpointer g_iface);
static void weak_locations_free_unlocked (GSList **weak_locations);
/* --- typedefs --- */
typedef struct _GObjectNotifyQueue GObjectNotifyQueue;
@@ -229,17 +234,13 @@ static GQuark quark_notify_queue;
static GParamSpecPool *pspec_pool = NULL;
static gulong gobject_signals[LAST_SIGNAL] = { 0, };
static guint (*floating_flag_handler) (GObject*, gint) = object_floating_flag_handler;
/* qdata pointing to GSList<GWeakRef *>, protected by weak_locations_lock */
static GQuark quark_weak_locations = 0;
static GRWLock weak_locations_lock;
#if HAVE_PRIVATE
G_ALWAYS_INLINE static inline GObjectPrivate *
g_object_get_instance_private (GObject *object)
{
return G_STRUCT_MEMBER_P (object, GObject_private_offset);
}
#endif
G_ALWAYS_INLINE static inline guint *
object_get_optional_flags_p (GObject *object)
@@ -288,6 +289,29 @@ object_bit_unlock (GObject *object, guint lock_bit)
g_bit_unlock ((gint *) object_get_optional_flags_p (object), _OPTIONAL_BIT_LOCK);
}
static void
object_bit_lock2 (GObject *object, guint lock_bit)
{
#if defined(G_ENABLE_DEBUG)
/* Only OPTIONAL2_BIT_LOCK_G_WEAK_REF is supported. Also, we will take this
* lock nested (on two objects) during _weak_ref_set(). We thus cannot use
* something like _object_bit_is_locked for asserting. */
g_assert (lock_bit == OPTIONAL2_BIT_LOCK_G_WEAK_REF);
#endif
if (object)
g_bit_lock ((gint *) &g_object_get_instance_private (object)->optional2_flags, _OPTIONAL2_BIT_LOCK);
}
static void
object_bit_unlock2 (GObject *object, guint lock_bit)
{
#if defined(G_ENABLE_DEBUG)
g_assert (lock_bit == OPTIONAL2_BIT_LOCK_G_WEAK_REF);
#endif
if (object)
g_bit_unlock ((gint *) &g_object_get_instance_private (object)->optional2_flags, _OPTIONAL2_BIT_LOCK);
}
/* --- functions --- */
static void
g_object_notify_queue_free (gpointer data)
@@ -542,10 +566,8 @@ _g_object_type_init (void)
}
#endif /* G_ENABLE_DEBUG */
#if HAVE_PRIVATE
GObject_private_offset =
g_type_add_instance_private (G_TYPE_OBJECT, sizeof (GObjectPrivate));
#endif
}
/* Initialize the global GParamSpecPool; this function needs to be
@@ -678,9 +700,7 @@ g_object_do_class_init (GObjectClass *class)
*/
g_type_add_interface_check (NULL, object_interface_check_properties);
#if HAVE_PRIVATE
g_type_class_adjust_private_offset (class, &GObject_private_offset);
#endif
}
/* Sinks @pspec if its a floating ref. */
@@ -1434,6 +1454,11 @@ g_object_finalize (GObject *object)
"and must be removed with g_object_ref_sink().",
G_OBJECT_TYPE_NAME (object), object);
}
/* At this point we really should not have any weak locations anymore. We
* are about to g_datalist_clear() them; if any were still set, we would
* need to take the OPTIONAL2_BIT_LOCK_G_WEAK_REF lock for that. */
g_assert (!g_datalist_id_get_data (&object->qdata, quark_weak_locations));
#endif
g_datalist_clear (&object->qdata);
@@ -1478,7 +1503,11 @@ g_object_run_dispose (GObject *object)
TRACE (GOBJECT_OBJECT_DISPOSE(object,G_TYPE_FROM_INSTANCE(object), 0));
G_OBJECT_GET_CLASS (object)->dispose (object);
TRACE (GOBJECT_OBJECT_DISPOSE_END(object,G_TYPE_FROM_INSTANCE(object), 0));
object_bit_lock2 (object, OPTIONAL2_BIT_LOCK_G_WEAK_REF);
g_datalist_id_remove_data (&object->qdata, quark_weak_locations);
object_bit_unlock2 (object, OPTIONAL2_BIT_LOCK_G_WEAK_REF);
g_object_unref (object);
}
@@ -3841,68 +3870,39 @@ gpointer
static gboolean
_object_unref_clear_weak_locations (GObject *object, gint *p_old_ref, gboolean do_unref)
{
GSList **weak_locations;
gboolean success;
if (do_unref)
{
gboolean unreffed = FALSE;
/* The final unref. Under lock, we try to decrement the ref count
* from 1 to 0. On success, we clear weak locations. */
/* Fast path for the final unref using a read-lock only. We check whether
* we have weak_locations and drop the ref count to zero under a reader lock. */
object_bit_lock2 (object, OPTIONAL2_BIT_LOCK_G_WEAK_REF);
g_rw_lock_reader_lock (&weak_locations_lock);
weak_locations = g_datalist_id_get_data (&object->qdata, quark_weak_locations);
if (!weak_locations)
{
unreffed = g_atomic_int_compare_and_exchange_full ((int *) &object->ref_count,
success = g_atomic_int_compare_and_exchange_full ((int *) &object->ref_count,
1, 0,
p_old_ref);
g_rw_lock_reader_unlock (&weak_locations_lock);
return unreffed;
if (success)
g_datalist_id_remove_data (&object->qdata, quark_weak_locations);
object_bit_unlock2 (object, OPTIONAL2_BIT_LOCK_G_WEAK_REF);
}
g_rw_lock_reader_unlock (&weak_locations_lock);
/* We have weak locations. Note that we are already past dispose() here. That
* means a GWeakRef was registered during dispose() (very unusual). */
g_rw_lock_writer_lock (&weak_locations_lock);
if (!g_atomic_int_compare_and_exchange_full ((int *) &object->ref_count,
1, 0,
p_old_ref))
else
{
g_rw_lock_writer_unlock (&weak_locations_lock);
return FALSE;
}
weak_locations = g_datalist_id_remove_no_notify (&object->qdata, quark_weak_locations);
g_clear_pointer (&weak_locations, weak_locations_free_unlocked);
g_rw_lock_writer_unlock (&weak_locations_lock);
return TRUE;
}
weak_locations = g_datalist_id_get_data (&object->qdata, quark_weak_locations);
if (weak_locations != NULL)
if (g_datalist_id_get_data (&object->qdata, quark_weak_locations))
{
g_rw_lock_writer_lock (&weak_locations_lock);
object_bit_lock2 (object, OPTIONAL2_BIT_LOCK_G_WEAK_REF);
/* Must check whether the ref-count is still 1, under lock. */
*p_old_ref = g_atomic_int_get (&object->ref_count);
if (*p_old_ref != 1)
success = (*p_old_ref == 1);
if (success)
g_datalist_id_remove_data (&object->qdata, quark_weak_locations);
object_bit_unlock2 (object, OPTIONAL2_BIT_LOCK_G_WEAK_REF);
}
else
{
g_rw_lock_writer_unlock (&weak_locations_lock);
return FALSE;
}
weak_locations = g_datalist_id_remove_no_notify (&object->qdata, quark_weak_locations);
g_clear_pointer (&weak_locations, weak_locations_free_unlocked);
g_rw_lock_writer_unlock (&weak_locations_lock);
return TRUE;
}
/* We don't need to re-fetch p_old_ref or check that it's still 1. The caller
* did that already. We are good.
*
@@ -3911,7 +3911,11 @@ _object_unref_clear_weak_locations (GObject *object, gint *p_old_ref, gboolean do_unref)
* If there is a race at this point, the same race can happen between
* _object_unref_clear_weak_locations() and the dispose() call. That is
* handled just fine. */
return TRUE;
success = TRUE;
}
}
return success;
}
/**
@@ -5064,6 +5068,170 @@ g_initially_unowned_class_init (GInitiallyUnownedClass *klass)
* without first having or creating a strong reference to the object.
*/
#define WEAK_REF_LOCK 0
static GObject *
_weak_ref_object_from_locked_pointer (gpointer ptr)
{
return g_pointer_bit_lock_mask_ptr (ptr, WEAK_REF_LOCK, FALSE, 0, NULL);
}
static void
_weak_ref_lock (GWeakRef *weak_ref, GObject **out_object)
{
g_pointer_bit_lock (&weak_ref->priv.p, WEAK_REF_LOCK);
if (out_object)
{
/* Let's read "weak_ref->priv.p" without atomics. We just took the bit lock
* (using atomics), so nobody else is changing this value now. */
*out_object = _weak_ref_object_from_locked_pointer (weak_ref->priv.p);
}
}
static void
_weak_ref_unlock (GWeakRef *weak_ref)
{
g_pointer_bit_unlock (&weak_ref->priv.p, WEAK_REF_LOCK);
}
static void
_weak_ref_unlock_and_set (GWeakRef *weak_ref, GObject *object)
{
g_pointer_bit_unlock_and_set (&weak_ref->priv.p, WEAK_REF_LOCK, object, 0);
}
static void
_weak_ref_list_free (GWeakRefList *weak_ref_list)
{
/* This must be called with an OPTIONAL2_BIT_LOCK_G_WEAK_REF lock held. Note
* that this function is the destroy notify for quark_weak_locations data. We
* will always explicitly call
*
* g_datalist_id_remove_data (&object->qdata, quark_weak_locations);
*
* while holding an OPTIONAL2_BIT_LOCK_G_WEAK_REF lock. The only exception is
* during finalize (g_datalist_clear()), but then the qdata is expected not
* to be set. */
while (weak_ref_list->list)
{
GWeakRef *weak_ref = weak_ref_list->list->data;
weak_ref_list->list = g_slist_remove (weak_ref_list->list, weak_ref);
_weak_ref_lock (weak_ref, NULL);
_weak_ref_unlock_and_set (weak_ref, NULL);
}
g_free_sized (weak_ref_list, sizeof (GWeakRefList));
}
static void
_weak_ref_set (GWeakRef *weak_ref,
GObject *new_object,
gboolean is_initially_null)
{
GWeakRefList *weak_ref_list;
GObject *old_object_unlock;
GObject *old_object_real;
GObject *old_object;
if (is_initially_null)
{
/* The caller indicates that the weak-ref is set to NULL. Don't check. */
old_object = NULL;
}
else
{
/* We must take a lock on old_object. For that, we have to first get a
* reference to it. */
old_object = g_weak_ref_get (weak_ref);
}
/* We need to lock both objects and the weak_ref. The order in which we take
* the locks matters so that we don't deadlock. */
if (new_object && old_object && (((guintptr) (gpointer) old_object) < ((guintptr) ((gpointer) new_object))))
{
/* To avoid deadlocks, we sort the objects by pointer value. */
object_bit_lock2 (old_object, OPTIONAL2_BIT_LOCK_G_WEAK_REF);
object_bit_lock2 (new_object, OPTIONAL2_BIT_LOCK_G_WEAK_REF);
old_object_unlock = old_object;
}
else
{
object_bit_lock2 (new_object, OPTIONAL2_BIT_LOCK_G_WEAK_REF);
if (new_object != old_object)
{
object_bit_lock2 (old_object, OPTIONAL2_BIT_LOCK_G_WEAK_REF);
old_object_unlock = old_object;
}
else
old_object_unlock = NULL;
}
_weak_ref_lock (weak_ref, &old_object_real);
if (old_object_real != old_object)
{
/* A race. old_object_real is not the expected old_object due to a
* concurrent g_weak_ref_set() call. */
if (old_object_real)
{
/* We lost the race and are done. */
object_bit_unlock2 (old_object_unlock, OPTIONAL2_BIT_LOCK_G_WEAK_REF);
object_bit_unlock2 (new_object, OPTIONAL2_BIT_LOCK_G_WEAK_REF);
_weak_ref_unlock (weak_ref);
g_clear_object (&old_object);
return;
}
/* old_object_real is NULL, which is unexpected but OK. Proceed to set. */
}
if (old_object_real && new_object != old_object_real)
{
weak_ref_list = g_datalist_id_get_data (&old_object_real->qdata, quark_weak_locations);
if (!weak_ref_list)
{
g_critical ("unexpected missing GWeakRefList");
}
else
{
weak_ref_list->list = g_slist_remove (weak_ref_list->list, weak_ref);
if (!weak_ref_list->list)
g_datalist_id_remove_data (&old_object_real->qdata, quark_weak_locations);
}
}
object_bit_unlock2 (old_object_unlock, OPTIONAL2_BIT_LOCK_G_WEAK_REF);
if (new_object && new_object != old_object_real)
{
if (g_atomic_int_get (&new_object->ref_count) < 1)
{
g_critical ("calling g_weak_ref_set() with already destroyed object");
new_object = NULL;
}
else
{
weak_ref_list = g_datalist_id_get_data (&new_object->qdata, quark_weak_locations);
if (!weak_ref_list)
{
weak_ref_list = g_new0 (GWeakRefList, 1);
g_datalist_id_set_data_full (&new_object->qdata, quark_weak_locations,
weak_ref_list, (GDestroyNotify) _weak_ref_list_free);
}
weak_ref_list->list = g_slist_prepend (weak_ref_list->list, weak_ref);
}
}
_weak_ref_unlock_and_set (weak_ref, new_object);
object_bit_unlock2 (new_object, OPTIONAL2_BIT_LOCK_G_WEAK_REF);
/* g_object_unref() emits callbacks. We may only unref now, after all locks
* have been released. */
g_clear_object (&old_object);
}
/**
* g_weak_ref_init: (skip)
* @weak_ref: (inout): uninitialized or empty location for a weak
@@ -5086,10 +5254,17 @@ void
g_weak_ref_init (GWeakRef *weak_ref,
gpointer object)
{
weak_ref->priv.p = NULL;
g_return_if_fail (weak_ref);
g_return_if_fail (object == NULL || G_IS_OBJECT (object));
g_atomic_pointer_set (&weak_ref->priv.p, NULL);
if (object)
g_weak_ref_set (weak_ref, object);
{
/* We give a hint that the weak_ref is currently NULL. Unlike
* g_weak_ref_set(), we then don't need extra locks just to
* find out that we have no object. */
_weak_ref_set (weak_ref, object, TRUE);
}
}
/**
@@ -5142,14 +5317,12 @@ g_weak_ref_get (GWeakRef *weak_ref)
g_return_val_if_fail (weak_ref, NULL);
g_rw_lock_reader_lock (&weak_locations_lock);
object = weak_ref->priv.p;
_weak_ref_lock (weak_ref, &object);
if (object)
object = object_ref (object, &toggle_notify, &toggle_data);
g_rw_lock_reader_unlock (&weak_locations_lock);
_weak_ref_unlock (weak_ref);
if (toggle_notify)
toggle_notify (toggle_data, object, FALSE);
@@ -5157,35 +5330,6 @@ g_weak_ref_get (GWeakRef *weak_ref)
return object;
}
static void
weak_locations_free_unlocked (GSList **weak_locations)
{
if (*weak_locations)
{
GSList *weak_location;
for (weak_location = *weak_locations; weak_location;)
{
GWeakRef *weak_ref_location = weak_location->data;
weak_ref_location->priv.p = NULL;
weak_location = g_slist_delete_link (weak_location, weak_location);
}
}
g_free (weak_locations);
}
static void
weak_locations_free (gpointer data)
{
GSList **weak_locations = data;
g_rw_lock_writer_lock (&weak_locations_lock);
weak_locations_free_unlocked (weak_locations);
g_rw_lock_writer_unlock (&weak_locations_lock);
}
/**
* g_weak_ref_set: (skip)
* @weak_ref: location for a weak reference
@@ -5203,80 +5347,8 @@ void
g_weak_ref_set (GWeakRef *weak_ref,
gpointer object)
{
GSList **weak_locations;
GObject *new_object;
GObject *old_object;
g_return_if_fail (weak_ref != NULL);
g_return_if_fail (object == NULL || G_IS_OBJECT (object));
new_object = object;
g_rw_lock_writer_lock (&weak_locations_lock);
/* We use the extra level of indirection here so that if we have ever
* had a weak pointer installed at any point in time on this object,
* we can see that there is a non-NULL value associated with the
* weak-pointer quark and know that this value will not change at any
* point in the object's lifetime.
*
* Both properties are important for reducing the amount of times we
* need to acquire locks and for decreasing the duration of time the
* lock is held while avoiding some rather tricky races.
*
* Specifically: we can avoid having to do an extra unconditional lock
* in g_object_unref() without worrying about some extremely tricky
* races.
*/
old_object = weak_ref->priv.p;
if (new_object != old_object)
{
weak_ref->priv.p = new_object;
/* Remove the weak ref from the old object */
if (old_object != NULL)
{
weak_locations = g_datalist_id_get_data (&old_object->qdata, quark_weak_locations);
if (weak_locations == NULL)
{
g_critical ("unexpected missing GWeakRef");
}
else
{
*weak_locations = g_slist_remove (*weak_locations, weak_ref);
if (!*weak_locations)
{
weak_locations_free_unlocked (weak_locations);
g_datalist_id_remove_no_notify (&old_object->qdata, quark_weak_locations);
}
}
}
/* Add the weak ref to the new object */
if (new_object != NULL)
{
if (g_atomic_int_get (&new_object->ref_count) < 1)
{
weak_ref->priv.p = NULL;
g_rw_lock_writer_unlock (&weak_locations_lock);
g_critical ("calling g_weak_ref_set() with already destroyed object");
return;
}
weak_locations = g_datalist_id_get_data (&new_object->qdata, quark_weak_locations);
if (weak_locations == NULL)
{
weak_locations = g_new0 (GSList *, 1);
g_datalist_id_set_data_full (&new_object->qdata, quark_weak_locations,
weak_locations, weak_locations_free);
}
*weak_locations = g_slist_prepend (*weak_locations, weak_ref);
}
}
g_rw_lock_writer_unlock (&weak_locations_lock);
_weak_ref_set (weak_ref, object, FALSE);
}
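
For context, a minimal, self-contained usage sketch of the GWeakRef API
whose locking this commit reworks (this is the documented API contract,
independent of the patch):

    #include <glib-object.h>

    int
    main (void)
    {
      GObject *obj = g_object_new (G_TYPE_OBJECT, NULL);
      GWeakRef weak;
      GObject *strong;

      g_weak_ref_init (&weak, obj);

      strong = g_weak_ref_get (&weak); /* returns a new strong ref, or NULL */
      g_assert (strong == obj);
      g_object_unref (strong);

      g_object_unref (obj); /* the final unref clears the weak location */
      g_assert (g_weak_ref_get (&weak) == NULL);

      g_weak_ref_clear (&weak);
      return 0;
    }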