From 787861761d5a40a57652305de91ac6c3080b0f9a Mon Sep 17 00:00:00 2001
From: Thomas Haller
Date: Sat, 23 Dec 2023 19:55:27 +0100
Subject: [PATCH 01/10] gobject/tests/reference: add test for deadlock related
 to GWeakRef and toggle reference

GWeakRef calls g_object_ref() while holding a read lock.
g_object_ref() can emit a toggle notification.

If the toggle notification callback then sets up a GWeakRef (to the same or
another object), the code tries to take a write lock and deadlocks.

Add a test to "show" that (the breaking part is commented out). It will be
fixed in a following commit.
---
 gobject/tests/reference.c | 51 +++++++++++++++++++++++++++++++++++++++
 1 file changed, 51 insertions(+)

diff --git a/gobject/tests/reference.c b/gobject/tests/reference.c
index 54b476834..90f5d3917 100644
--- a/gobject/tests/reference.c
+++ b/gobject/tests/reference.c
@@ -719,6 +719,56 @@ test_weak_ref_on_toggle_notify (void)
   g_assert_null (g_weak_ref_get (&weak));
 }
 
+static void
+weak_ref_in_toggle_notify_toggle_cb (gpointer data,
+                                     GObject *object,
+                                     gboolean is_last_ref)
+{
+  GWeakRef weak2;
+  GObject *obj2;
+
+  if (is_last_ref)
+    return;
+
+  /* We just got a second ref, while calling g_weak_ref_get().
+   * At this point, we hold a lock for the weak ref.
+   *
+   * FIXME: currently we would dead lock with the lines below.
+   */
+  return;
+
+  g_weak_ref_init (&weak2, object);
+  g_assert_true (object == g_weak_ref_get (&weak2));
+  g_object_unref (object);
+
+  obj2 = g_object_new (G_TYPE_OBJECT, NULL);
+  g_weak_ref_set (&weak2, obj2);
+  g_object_unref (obj2);
+
+  g_assert_null (g_weak_ref_get (&weak2));
+}
+
+static void
+test_weak_ref_in_toggle_notify (void)
+{
+  GObject *obj;
+  GWeakRef weak = { { GUINT_TO_POINTER (0xDEADBEEFU) } };
+
+  obj = g_object_new (G_TYPE_OBJECT, NULL);
+  g_object_add_toggle_ref (obj, weak_ref_in_toggle_notify_toggle_cb, NULL);
+  g_object_unref (obj);
+
+  g_weak_ref_init (&weak, obj);
+
+  /* We trigger a toggle notify via g_weak_ref_get(). */
+  g_assert_true (g_weak_ref_get (&weak) == obj);
+
+  g_object_remove_toggle_ref (obj, weak_ref_in_toggle_notify_toggle_cb, NULL);
+  g_object_unref (obj);
+
+  g_assert_null (g_weak_ref_get (&weak));
+}
+
 typedef struct
 {
   gboolean should_be_last;
@@ -1365,6 +1415,7 @@ main (int argc, char **argv)
   g_test_add_func ("/object/weak-ref/on-dispose", test_weak_ref_on_dispose);
   g_test_add_func ("/object/weak-ref/on-run-dispose", test_weak_ref_on_run_dispose);
   g_test_add_func ("/object/weak-ref/on-toggle-notify", test_weak_ref_on_toggle_notify);
+  g_test_add_func ("/object/weak-ref/in-toggle-notify", test_weak_ref_in_toggle_notify);
   g_test_add_func ("/object/toggle-ref", test_toggle_ref);
   g_test_add_func ("/object/toggle-ref/ref-on-dispose", test_toggle_ref_on_dispose);
   g_test_add_func ("/object/toggle-ref/ref-and-notify-on-dispose", test_toggle_ref_and_notify_on_dispose);

From 6c389738d313ee04919a935c417d891a1479fd90 Mon Sep 17 00:00:00 2001
From: Thomas Haller
Date: Sun, 24 Dec 2023 07:58:51 +0100
Subject: [PATCH 02/10] gobject/tests/reference: add tests for concurrent
 g_weak_ref_set()

GWeakRef methods are thread-safe. Add a test that calls g_weak_ref_set()
concurrently from multiple threads.
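The invariant the test exercises is roughly the following (a sketch, not the
literal test code):

    g_weak_ref_set (&weak, obj);    /* this thread */
    obj2 = g_weak_ref_get (&weak);  /* may race with setters in other threads */
    /* obj2 is either obj or whatever another thread set in the meantime
     * (possibly NULL), but never a dangling pointer. */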
--- gobject/tests/reference.c | 172 ++++++++++++++++++++++++++++++++++++++ 1 file changed, 172 insertions(+) diff --git a/gobject/tests/reference.c b/gobject/tests/reference.c index 90f5d3917..7b73eb725 100644 --- a/gobject/tests/reference.c +++ b/gobject/tests/reference.c @@ -769,6 +769,176 @@ test_weak_ref_in_toggle_notify (void) g_assert_null (g_weak_ref_get (&weak)); } +/*****************************************************************************/ + +#define CONCURRENT_N_OBJS 5 +#define CONCURRENT_N_THREADS 5 +#define CONCURRENT_N_RACES 100 + +typedef struct +{ + int TEST_IDX; + GObject *objs[CONCURRENT_N_OBJS]; + int thread_done[CONCURRENT_N_THREADS]; +} ConcurrentData; + +typedef struct +{ + const ConcurrentData *data; + int idx; + int race_count; + GWeakRef *weak_ref; + GRand *rnd; +} ConcurrentThreadData; + +static gpointer +_test_weak_ref_concurrent_thread_cb (gpointer data) +{ + ConcurrentThreadData *thread_data = data; + + while (TRUE) + { + gboolean all_done; + int i; + int r; + + for (r = 0; r < 15; r++) + { + GObject *obj_allocated = NULL; + GObject *obj; + GObject *obj2; + gboolean got_race; + + /* Choose a random object */ + obj = thread_data->data->objs[g_rand_int (thread_data->rnd) % CONCURRENT_N_OBJS]; + if (thread_data->data->TEST_IDX > 0 && (g_rand_int (thread_data->rnd) % 4 == 0)) + { + /* With TEST_IDX>0 also randomly choose NULL or a newly created + * object. */ + if (g_rand_boolean (thread_data->rnd)) + obj = NULL; + else + { + obj_allocated = g_object_new (G_TYPE_OBJECT, NULL); + obj = obj_allocated; + } + } + + g_assert (!obj || G_IS_OBJECT (obj)); + + g_weak_ref_set (thread_data->weak_ref, obj); + + /* get the weak-ref. If there is no race, we expect to get the same + * object back. */ + obj2 = g_weak_ref_get (thread_data->weak_ref); + + g_assert (!obj2 || G_IS_OBJECT (obj2)); + if (!obj2) + { + g_assert (thread_data->data->TEST_IDX > 0); + } + if (obj != obj2) + { + int cnt; + + cnt = 0; + for (i = 0; i < CONCURRENT_N_OBJS; i++) + { + if (obj2 == thread_data->data->objs[i]) + cnt++; + } + if (!obj2) + g_assert_cmpint (cnt, ==, 0); + else if (obj2 && obj2 == obj_allocated) + g_assert_cmpint (cnt, ==, 0); + else if (thread_data->data->TEST_IDX > 0) + g_assert_cmpint (cnt, <=, 1); + else + g_assert_cmpint (cnt, ==, 1); + got_race = TRUE; + } + else + got_race = FALSE; + + g_clear_object (&obj2); + g_clear_object (&obj_allocated); + + if (got_race) + { + /* Each thread should see CONCURRENT_N_RACES before being done. + * Count them. */ + if (g_atomic_int_get (&thread_data->race_count) > CONCURRENT_N_RACES) + g_atomic_int_set (&thread_data->data->thread_done[thread_data->idx], 1); + else + g_atomic_int_add (&thread_data->race_count, 1); + } + } + + /* Each thread runs, until all threads saw the expected number of races. */ + all_done = TRUE; + for (i = 0; i < CONCURRENT_N_THREADS; i++) + { + if (!g_atomic_int_get (&thread_data->data->thread_done[i])) + { + all_done = FALSE; + break; + } + } + if (all_done) + return GINT_TO_POINTER (1); + } +} + +static void +test_weak_ref_concurrent (gconstpointer testdata) +{ + const int TEST_IDX = GPOINTER_TO_INT (testdata); + GThread *threads[CONCURRENT_N_THREADS]; + int i; + ConcurrentData data = { + .TEST_IDX = TEST_IDX, + }; + ConcurrentThreadData thread_data[CONCURRENT_N_THREADS]; + GWeakRef weak_ref = { 0 }; + + /* Let several threads call g_weak_ref_set() & g_weak_ref_get() in a loop. 
*/ + + for (i = 0; i < CONCURRENT_N_OBJS; i++) + data.objs[i] = g_object_new (G_TYPE_OBJECT, NULL); + + for (i = 0; i < CONCURRENT_N_THREADS; i++) + { + const guint32 rnd_seed[] = { + g_test_rand_int (), + g_test_rand_int (), + g_test_rand_int (), + }; + + thread_data[i] = (ConcurrentThreadData){ + .idx = i, + .data = &data, + .weak_ref = &weak_ref, + .rnd = g_rand_new_with_seed_array (rnd_seed, G_N_ELEMENTS (rnd_seed)), + }; + threads[i] = g_thread_new ("test-weak-ref-concurrent", _test_weak_ref_concurrent_thread_cb, &thread_data[i]); + } + + for (i = 0; i < CONCURRENT_N_THREADS; i++) + { + gpointer r; + + r = g_thread_join (g_steal_pointer (&threads[i])); + g_assert_cmpint (GPOINTER_TO_INT (r), ==, 1); + } + + for (i = 0; i < CONCURRENT_N_OBJS; i++) + g_object_unref (g_steal_pointer (&data.objs[i])); + for (i = 0; i < CONCURRENT_N_THREADS; i++) + g_rand_free (g_steal_pointer (&thread_data[i].rnd)); +} + +/*****************************************************************************/ + typedef struct { gboolean should_be_last; @@ -1416,6 +1586,8 @@ main (int argc, char **argv) g_test_add_func ("/object/weak-ref/on-run-dispose", test_weak_ref_on_run_dispose); g_test_add_func ("/object/weak-ref/on-toggle-notify", test_weak_ref_on_toggle_notify); g_test_add_func ("/object/weak-ref/in-toggle-notify", test_weak_ref_in_toggle_notify); + g_test_add_data_func ("/object/weak-ref/concurrent/0", GINT_TO_POINTER (0), test_weak_ref_concurrent); + g_test_add_data_func ("/object/weak-ref/concurrent/1", GINT_TO_POINTER (1), test_weak_ref_concurrent); g_test_add_func ("/object/toggle-ref", test_toggle_ref); g_test_add_func ("/object/toggle-ref/ref-on-dispose", test_toggle_ref_on_dispose); g_test_add_func ("/object/toggle-ref/ref-and-notify-on-dispose", test_toggle_ref_and_notify_on_dispose); From a12f0731ddf2f84e96e44f08957b9ee6d4dd2f4a Mon Sep 17 00:00:00 2001 From: Thomas Haller Date: Thu, 21 Dec 2023 12:11:37 +0100 Subject: [PATCH 03/10] glib: add internal macro G_THREAD_LOCAL for support for thread local storage _Thread_local is also C11, so possibly other compilers would also support it. However, since not *all* compilers support it, it can anyway only be used as optimization and conditional asserts. As such, the current detection based on __GNUC__ to only support gcc (and clang) is good enough. --- glib/glib-private.h | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/glib/glib-private.h b/glib/glib-private.h index 50aa8a050..479ebb9df 100644 --- a/glib/glib-private.h +++ b/glib/glib-private.h @@ -321,4 +321,10 @@ GLibPrivateVTable *glib__private__ (void); gboolean g_uint_equal (gconstpointer v1, gconstpointer v2); guint g_uint_hash (gconstpointer v); +#if defined(__GNUC__) +#define G_THREAD_LOCAL __thread +#else +#undef G_THREAD_LOCAL +#endif + #endif /* __GLIB_PRIVATE_H__ */ From 777606d9c44d95f24d121bca81fc4e4fe0db4920 Mon Sep 17 00:00:00 2001 From: Thomas Haller Date: Wed, 20 Dec 2023 09:00:32 +0100 Subject: [PATCH 04/10] gobject: add private data for GObject and use it for optional flags Add a GObjectPrivate struct and let GObject have private data. On architectures where we have an alignment gap in GObject struct (64 bit), we use the gap for "optional_flags". Use the private data for those optional flags, on architectures where we don't have them. For now, private data is only added for those optional flags (and not on architectures, where the flags fit inside GObject). In the future, we may add additional fields there, and add the private struct always. 
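For illustration only (a sketch, not part of the patch), the two layouts in
question are roughly:

    64-bit: GTypeInstance (8) | ref_count (4) | optional_flags (4) | GData *qdata (8)
            -> "optional_flags" fills the alignment gap, no private data needed
    32-bit: GTypeInstance (4) | ref_count (4) | GData *qdata (4)
            -> no gap, so "optional_flags" moves into GObjectPrivate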
The main purpose will be to replace all the global locks with per-object locks, and make "optional_flags" also available on 32bit. --- gobject/gobject.c | 112 +++++++++++++++++++++------------------------- 1 file changed, 51 insertions(+), 61 deletions(-) diff --git a/gobject/gobject.c b/gobject/gobject.c index fa499ff0a..0681c98c8 100644 --- a/gobject/gobject.c +++ b/gobject/gobject.c @@ -107,7 +107,24 @@ enum { #define OPTIONAL_FLAG_HAS_NOTIFY_HANDLER (1 << 2) /* Same, specifically for "notify" */ #if SIZEOF_INT == 4 && GLIB_SIZEOF_VOID_P == 8 -#define HAVE_OPTIONAL_FLAGS +#define HAVE_OPTIONAL_FLAGS_IN_GOBJECT 1 +#else +#define HAVE_OPTIONAL_FLAGS_IN_GOBJECT 0 +#endif + +/* For now we only create a private struct if we don't have optional flags in + * GObject. Currently we don't need it otherwise. In the future we might + * always add a private struct. */ +#define HAVE_PRIVATE (!HAVE_OPTIONAL_FLAGS_IN_GOBJECT) + +#if HAVE_PRIVATE +typedef struct { +#if !HAVE_OPTIONAL_FLAGS_IN_GOBJECT + guint optional_flags; /* (atomic) */ +#endif +} GObjectPrivate; + +static int GObject_private_offset; #endif typedef struct @@ -116,7 +133,7 @@ typedef struct /*< private >*/ guint ref_count; /* (atomic) */ -#ifdef HAVE_OPTIONAL_FLAGS +#if HAVE_OPTIONAL_FLAGS_IN_GOBJECT guint optional_flags; /* (atomic) */ #endif GData *qdata; @@ -190,9 +207,6 @@ static GQuark quark_closure_array = 0; static GQuark quark_weak_notifies = 0; static GQuark quark_toggle_refs = 0; static GQuark quark_notify_queue; -#ifndef HAVE_OPTIONAL_FLAGS -static GQuark quark_in_construction; -#endif static GParamSpecPool *pspec_pool = NULL; static gulong gobject_signals[LAST_SIGNAL] = { 0, }; static guint (*floating_flag_handler) (GObject*, gint) = object_floating_flag_handler; @@ -202,6 +216,24 @@ static GRWLock weak_locations_lock; G_LOCK_DEFINE_STATIC(notify_lock); +#if HAVE_PRIVATE +G_ALWAYS_INLINE static inline GObjectPrivate * +g_object_get_instance_private (GObject *object) +{ + return G_STRUCT_MEMBER_P (object, GObject_private_offset); +} +#endif + +G_ALWAYS_INLINE static inline guint * +object_get_optional_flags_p (GObject *object) +{ +#if HAVE_OPTIONAL_FLAGS_IN_GOBJECT + return &(((GObjectReal *) object)->optional_flags); +#else + return &g_object_get_instance_private (object)->optional_flags; +#endif +} + /* --- functions --- */ static void g_object_notify_queue_free (gpointer data) @@ -455,6 +487,11 @@ _g_object_type_init (void) # endif /* G_HAS_CONSTRUCTORS */ } #endif /* G_ENABLE_DEBUG */ + +#if HAVE_PRIVATE + GObject_private_offset = + g_type_add_instance_private (G_TYPE_OBJECT, sizeof (GObjectPrivate)); +#endif } /* Initialize the global GParamSpecPool; this function needs to be @@ -529,9 +566,6 @@ g_object_do_class_init (GObjectClass *class) quark_weak_locations = g_quark_from_static_string ("GObject-weak-locations"); quark_toggle_refs = g_quark_from_static_string ("GObject-toggle-references"); quark_notify_queue = g_quark_from_static_string ("GObject-notify-queue"); -#ifndef HAVE_OPTIONAL_FLAGS - quark_in_construction = g_quark_from_static_string ("GObject-in-construction"); -#endif g_object_init_pspec_pool (); @@ -589,6 +623,10 @@ g_object_do_class_init (GObjectClass *class) * implement an interface implement all properties for that interface */ g_type_add_interface_check (NULL, object_interface_check_properties); + +#if HAVE_PRIVATE + g_type_class_adjust_private_offset (class, &GObject_private_offset); +#endif } /* Sinks @pspec if it’s a floating ref. 
*/ @@ -1183,12 +1221,7 @@ g_object_interface_list_properties (gpointer g_iface, static inline guint object_get_optional_flags (GObject *object) { -#ifdef HAVE_OPTIONAL_FLAGS - GObjectReal *real = (GObjectReal *)object; - return (guint)g_atomic_int_get (&real->optional_flags); -#else - return 0; -#endif + return g_atomic_int_get (object_get_optional_flags_p (object)); } /* Variant of object_get_optional_flags for when @@ -1198,21 +1231,14 @@ object_get_optional_flags (GObject *object) static inline guint object_get_optional_flags_X (GObject *object) { -#ifdef HAVE_OPTIONAL_FLAGS - GObjectReal *real = (GObjectReal *)object; - return real->optional_flags; -#else - return 0; -#endif + return *object_get_optional_flags_p (object); } -#ifdef HAVE_OPTIONAL_FLAGS static inline void object_set_optional_flags (GObject *object, guint flags) { - GObjectReal *real = (GObjectReal *)object; - g_atomic_int_or (&real->optional_flags, flags); + g_atomic_int_or (object_get_optional_flags_p (object), flags); } /* Variant for when we have exclusive access @@ -1222,8 +1248,7 @@ static inline void object_set_optional_flags_X (GObject *object, guint flags) { - GObjectReal *real = (GObjectReal *)object; - real->optional_flags |= flags; + (*object_get_optional_flags_p (object)) |= flags; } /* Variant for when we have exclusive access @@ -1233,83 +1258,55 @@ static inline void object_unset_optional_flags_X (GObject *object, guint flags) { - GObjectReal *real = (GObjectReal *)object; - real->optional_flags &= ~flags; + (*object_get_optional_flags_p (object)) &= ~flags; } -#endif gboolean _g_object_has_signal_handler (GObject *object) { -#ifdef HAVE_OPTIONAL_FLAGS return (object_get_optional_flags (object) & OPTIONAL_FLAG_HAS_SIGNAL_HANDLER) != 0; -#else - return TRUE; -#endif } static inline gboolean _g_object_has_notify_handler (GObject *object) { -#ifdef HAVE_OPTIONAL_FLAGS return CLASS_NEEDS_NOTIFY (G_OBJECT_GET_CLASS (object)) || (object_get_optional_flags (object) & OPTIONAL_FLAG_HAS_NOTIFY_HANDLER) != 0; -#else - return TRUE; -#endif } static inline gboolean _g_object_has_notify_handler_X (GObject *object) { -#ifdef HAVE_OPTIONAL_FLAGS return CLASS_NEEDS_NOTIFY (G_OBJECT_GET_CLASS (object)) || (object_get_optional_flags_X (object) & OPTIONAL_FLAG_HAS_NOTIFY_HANDLER) != 0; -#else - return TRUE; -#endif } void _g_object_set_has_signal_handler (GObject *object, guint signal_id) { -#ifdef HAVE_OPTIONAL_FLAGS guint flags = OPTIONAL_FLAG_HAS_SIGNAL_HANDLER; if (signal_id == gobject_signals[NOTIFY]) flags |= OPTIONAL_FLAG_HAS_NOTIFY_HANDLER; object_set_optional_flags (object, flags); -#endif } static inline gboolean object_in_construction (GObject *object) { -#ifdef HAVE_OPTIONAL_FLAGS return (object_get_optional_flags (object) & OPTIONAL_FLAG_IN_CONSTRUCTION) != 0; -#else - return g_datalist_id_get_data (&object->qdata, quark_in_construction) != NULL; -#endif } static inline void set_object_in_construction (GObject *object) { -#ifdef HAVE_OPTIONAL_FLAGS object_set_optional_flags_X (object, OPTIONAL_FLAG_IN_CONSTRUCTION); -#else - g_datalist_id_set_data (&object->qdata, quark_in_construction, object); -#endif } static inline void unset_object_in_construction (GObject *object) { -#ifdef HAVE_OPTIONAL_FLAGS object_unset_optional_flags_X (object, OPTIONAL_FLAG_IN_CONSTRUCTION); -#else - g_datalist_id_set_data (&object->qdata, quark_in_construction, NULL); -#endif } static void @@ -1498,9 +1495,7 @@ static inline void g_object_notify_by_spec_internal (GObject *object, GParamSpec *pspec) { -#ifdef HAVE_OPTIONAL_FLAGS guint 
object_flags; -#endif gboolean needs_notify; gboolean in_init; @@ -1509,16 +1504,11 @@ g_object_notify_by_spec_internal (GObject *object, param_spec_follow_override (&pspec); -#ifdef HAVE_OPTIONAL_FLAGS /* get all flags we need with a single atomic read */ object_flags = object_get_optional_flags (object); needs_notify = ((object_flags & OPTIONAL_FLAG_HAS_NOTIFY_HANDLER) != 0) || CLASS_NEEDS_NOTIFY (G_OBJECT_GET_CLASS (object)); in_init = (object_flags & OPTIONAL_FLAG_IN_CONSTRUCTION) != 0; -#else - needs_notify = TRUE; - in_init = object_in_construction (object); -#endif if (pspec != NULL && needs_notify) { From bfb829231f41a906b6ff53160ce15cd9d9228c0b Mon Sep 17 00:00:00 2001 From: Thomas Haller Date: Wed, 20 Dec 2023 09:48:27 +0100 Subject: [PATCH 05/10] gobject: never access optional flags without atomic The optional flags should be used for bit locks. That means, we must only use atomic operations when updating the flags. Having a variant of _X methods that update the flags without locks (_X), means that we must take care not to take bit locks during construction. That is hard to get right. There is so much happening during object construction, that it's unclear when it's really safe to access the flags without atomic. Don't do this. --- gobject/gobject.c | 42 ++++++------------------------------------ 1 file changed, 6 insertions(+), 36 deletions(-) diff --git a/gobject/gobject.c b/gobject/gobject.c index 0681c98c8..029de8b22 100644 --- a/gobject/gobject.c +++ b/gobject/gobject.c @@ -1224,16 +1224,6 @@ object_get_optional_flags (GObject *object) return g_atomic_int_get (object_get_optional_flags_p (object)); } -/* Variant of object_get_optional_flags for when - * we know that we have exclusive access (during - * construction) - */ -static inline guint -object_get_optional_flags_X (GObject *object) -{ - return *object_get_optional_flags_p (object); -} - static inline void object_set_optional_flags (GObject *object, guint flags) @@ -1241,24 +1231,11 @@ object_set_optional_flags (GObject *object, g_atomic_int_or (object_get_optional_flags_p (object), flags); } -/* Variant for when we have exclusive access - * (during construction) - */ static inline void -object_set_optional_flags_X (GObject *object, - guint flags) -{ - (*object_get_optional_flags_p (object)) |= flags; -} - -/* Variant for when we have exclusive access - * (during construction) - */ -static inline void -object_unset_optional_flags_X (GObject *object, +object_unset_optional_flags (GObject *object, guint flags) { - (*object_get_optional_flags_p (object)) &= ~flags; + g_atomic_int_and (object_get_optional_flags_p (object), ~flags); } gboolean @@ -1274,13 +1251,6 @@ _g_object_has_notify_handler (GObject *object) (object_get_optional_flags (object) & OPTIONAL_FLAG_HAS_NOTIFY_HANDLER) != 0; } -static inline gboolean -_g_object_has_notify_handler_X (GObject *object) -{ - return CLASS_NEEDS_NOTIFY (G_OBJECT_GET_CLASS (object)) || - (object_get_optional_flags_X (object) & OPTIONAL_FLAG_HAS_NOTIFY_HANDLER) != 0; -} - void _g_object_set_has_signal_handler (GObject *object, guint signal_id) @@ -1300,13 +1270,13 @@ object_in_construction (GObject *object) static inline void set_object_in_construction (GObject *object) { - object_set_optional_flags_X (object, OPTIONAL_FLAG_IN_CONSTRUCTION); + object_set_optional_flags (object, OPTIONAL_FLAG_IN_CONSTRUCTION); } static inline void unset_object_in_construction (GObject *object) { - object_unset_optional_flags_X (object, OPTIONAL_FLAG_IN_CONSTRUCTION); + object_unset_optional_flags 
(object, OPTIONAL_FLAG_IN_CONSTRUCTION); } static void @@ -2168,7 +2138,7 @@ g_object_new_with_custom_constructor (GObjectClass *class, if (CLASS_HAS_PROPS (class)) { - if ((newly_constructed && _g_object_has_notify_handler_X (object)) || + if ((newly_constructed && _g_object_has_notify_handler (object)) || _g_object_has_notify_handler (object)) { /* This may or may not have been setup in g_object_init(). @@ -2218,7 +2188,7 @@ g_object_new_internal (GObjectClass *class, { GSList *node; - if (_g_object_has_notify_handler_X (object)) + if (_g_object_has_notify_handler (object)) { /* This may or may not have been setup in g_object_init(). * If it hasn't, we do it now. From c66880e46f762fcce25e536ce44c37899bd7adf2 Mon Sep 17 00:00:00 2001 From: Thomas Haller Date: Wed, 20 Dec 2023 09:54:21 +0100 Subject: [PATCH 06/10] gobject: use per-object bit-lock instead of global mutex for weak-refs --- gobject/gobject.c | 65 +++++++++++++++++++++++++++++++++++++++++++---- 1 file changed, 60 insertions(+), 5 deletions(-) diff --git a/gobject/gobject.c b/gobject/gobject.c index 029de8b22..0278f979d 100644 --- a/gobject/gobject.c +++ b/gobject/gobject.c @@ -102,9 +102,28 @@ enum { PROP_NONE }; +#define _OPTIONAL_BIT_LOCK 3 + #define OPTIONAL_FLAG_IN_CONSTRUCTION (1 << 0) #define OPTIONAL_FLAG_HAS_SIGNAL_HANDLER (1 << 1) /* Set if object ever had a signal handler */ #define OPTIONAL_FLAG_HAS_NOTIFY_HANDLER (1 << 2) /* Same, specifically for "notify" */ +#define OPTIONAL_FLAG_LOCK (1 << 3) /* _OPTIONAL_BIT_LOCK */ + +/* We use g_bit_lock(), which only supports one lock per integer. + * + * Hence, while we have locks for different purposes, internally they all + * map to the same bit lock (_OPTIONAL_BIT_LOCK). + * + * This means you cannot take a lock (object_bit_lock()) while already holding + * another bit lock. There is an assert against that with G_ENABLE_DEBUG + * builds (_object_bit_is_locked). + * + * In the past, we had different global mutexes per topic. Now we have one + * per-object mutex for several topics. The downside is that we are not as + * parallel as possible. The alternative would be to add individual locking + * integers to GObjectPrivate. But increasing memory usage for more parallelism + * (per-object!) is not worth it. */ +#define OPTIONAL_BIT_LOCK_WEAK_REFS 1 #if SIZEOF_INT == 4 && GLIB_SIZEOF_VOID_P == 8 #define HAVE_OPTIONAL_FLAGS_IN_GOBJECT 1 @@ -201,7 +220,6 @@ struct _GObjectNotifyQueue /* --- variables --- */ G_LOCK_DEFINE_STATIC (closure_array_mutex); -G_LOCK_DEFINE_STATIC (weak_refs_mutex); G_LOCK_DEFINE_STATIC (toggle_refs_mutex); static GQuark quark_closure_array = 0; static GQuark quark_weak_notifies = 0; @@ -234,6 +252,43 @@ object_get_optional_flags_p (GObject *object) #endif } +#if defined(G_ENABLE_DEBUG) && defined(G_THREAD_LOCAL) +/* Using this thread-local global is sufficient to guard the per-object + * locking, because while the current thread holds a lock on one object, it + * never calls out to another object (because doing so would would be prone to + * deadlock). */ +static G_THREAD_LOCAL guint _object_bit_is_locked; +#endif + +static void +object_bit_lock (GObject *object, guint lock_bit) +{ +#if defined(G_ENABLE_DEBUG) && defined(G_THREAD_LOCAL) + /* all object_bit_lock() really use the same bit/mutex. The "lock_bit" argument + * only exists for asserting. object_bit_lock() is not re-entrant (also not with + * different "lock_bit" values). 
*/ + g_assert (lock_bit > 0); + g_assert (_object_bit_is_locked == 0); + _object_bit_is_locked = lock_bit; +#endif + + g_bit_lock ((gint *) object_get_optional_flags_p (object), _OPTIONAL_BIT_LOCK); +} + +static void +object_bit_unlock (GObject *object, guint lock_bit) +{ +#if defined(G_ENABLE_DEBUG) && defined(G_THREAD_LOCAL) + /* All lock_bit map to the same mutex. We cannot use two different locks on + * the same integer. Assert against that. */ + g_assert (lock_bit > 0); + g_assert (_object_bit_is_locked == lock_bit); + _object_bit_is_locked = 0; +#endif + + g_bit_unlock ((gint *) object_get_optional_flags_p (object), _OPTIONAL_BIT_LOCK); +} + /* --- functions --- */ static void g_object_notify_queue_free (gpointer data) @@ -3251,7 +3306,7 @@ g_object_weak_ref (GObject *object, g_return_if_fail (notify != NULL); g_return_if_fail (g_atomic_int_get (&object->ref_count) >= 1); - G_LOCK (weak_refs_mutex); + object_bit_lock (object, OPTIONAL_BIT_LOCK_WEAK_REFS); wstack = g_datalist_id_remove_no_notify (&object->qdata, quark_weak_notifies); if (wstack) { @@ -3268,7 +3323,7 @@ g_object_weak_ref (GObject *object, wstack->weak_refs[i].notify = notify; wstack->weak_refs[i].data = data; g_datalist_id_set_data_full (&object->qdata, quark_weak_notifies, wstack, weak_refs_notify); - G_UNLOCK (weak_refs_mutex); + object_bit_unlock (object, OPTIONAL_BIT_LOCK_WEAK_REFS); } /** @@ -3290,7 +3345,7 @@ g_object_weak_unref (GObject *object, g_return_if_fail (G_IS_OBJECT (object)); g_return_if_fail (notify != NULL); - G_LOCK (weak_refs_mutex); + object_bit_lock (object, OPTIONAL_BIT_LOCK_WEAK_REFS); wstack = g_datalist_id_get_data (&object->qdata, quark_weak_notifies); if (wstack) { @@ -3308,7 +3363,7 @@ g_object_weak_unref (GObject *object, break; } } - G_UNLOCK (weak_refs_mutex); + object_bit_unlock (object, OPTIONAL_BIT_LOCK_WEAK_REFS); if (!found_one) g_critical ("%s: couldn't find weak ref %p(%p)", G_STRFUNC, notify, data); } From cf044ba7ad9f301e5fad4f75962c305a47076c7e Mon Sep 17 00:00:00 2001 From: Thomas Haller Date: Wed, 20 Dec 2023 09:55:32 +0100 Subject: [PATCH 07/10] gobject: use per-object bit-lock instead of global mutex for notify-queue --- gobject/gobject.c | 21 ++++++++++----------- 1 file changed, 10 insertions(+), 11 deletions(-) diff --git a/gobject/gobject.c b/gobject/gobject.c index 0278f979d..226f820fc 100644 --- a/gobject/gobject.c +++ b/gobject/gobject.c @@ -124,6 +124,7 @@ enum { * integers to GObjectPrivate. But increasing memory usage for more parallelism * (per-object!) is not worth it. 
*/ #define OPTIONAL_BIT_LOCK_WEAK_REFS 1 +#define OPTIONAL_BIT_LOCK_NOTIFY 2 #if SIZEOF_INT == 4 && GLIB_SIZEOF_VOID_P == 8 #define HAVE_OPTIONAL_FLAGS_IN_GOBJECT 1 @@ -232,8 +233,6 @@ static guint (*floating_flag_handler) (GObject*, gint) = object_floating_flag_ha static GQuark quark_weak_locations = 0; static GRWLock weak_locations_lock; -G_LOCK_DEFINE_STATIC(notify_lock); - #if HAVE_PRIVATE G_ALWAYS_INLINE static inline GObjectPrivate * g_object_get_instance_private (GObject *object) @@ -321,7 +320,7 @@ g_object_notify_queue_freeze (GObject *object) { GObjectNotifyQueue *nqueue; - G_LOCK(notify_lock); + object_bit_lock (object, OPTIONAL_BIT_LOCK_NOTIFY); nqueue = g_datalist_id_get_data (&object->qdata, quark_notify_queue); if (!nqueue) { @@ -338,7 +337,7 @@ g_object_notify_queue_freeze (GObject *object) nqueue->freeze_count++; out: - G_UNLOCK(notify_lock); + object_bit_unlock (object, OPTIONAL_BIT_LOCK_NOTIFY); return nqueue; } @@ -352,7 +351,7 @@ g_object_notify_queue_thaw (GObject *object, GSList *slist; guint n_pspecs = 0; - G_LOCK(notify_lock); + object_bit_lock (object, OPTIONAL_BIT_LOCK_NOTIFY); if (!nqueue) { @@ -363,7 +362,7 @@ g_object_notify_queue_thaw (GObject *object, /* Just make sure we never get into some nasty race condition */ if (G_UNLIKELY (!nqueue || nqueue->freeze_count == 0)) { - G_UNLOCK (notify_lock); + object_bit_unlock (object, OPTIONAL_BIT_LOCK_NOTIFY); g_critical ("%s: property-changed notification for %s(%p) is not frozen", G_STRFUNC, G_OBJECT_TYPE_NAME (object), object); return; @@ -372,7 +371,7 @@ g_object_notify_queue_thaw (GObject *object, nqueue->freeze_count--; if (nqueue->freeze_count) { - G_UNLOCK (notify_lock); + object_bit_unlock (object, OPTIONAL_BIT_LOCK_NOTIFY); return; } @@ -384,7 +383,7 @@ g_object_notify_queue_thaw (GObject *object, } g_datalist_id_set_data (&object->qdata, quark_notify_queue, NULL); - G_UNLOCK(notify_lock); + object_bit_unlock (object, OPTIONAL_BIT_LOCK_NOTIFY); if (n_pspecs) { @@ -405,7 +404,7 @@ g_object_notify_queue_add (GObject *object, GParamSpec *pspec, gboolean in_init) { - G_LOCK(notify_lock); + object_bit_lock (object, OPTIONAL_BIT_LOCK_NOTIFY); if (!nqueue) { @@ -419,7 +418,7 @@ g_object_notify_queue_add (GObject *object, { /* We don't have a notify queue and are not in_init. The event * is not to be queued. The caller will dispatch directly. */ - G_UNLOCK (notify_lock); + object_bit_unlock (object, OPTIONAL_BIT_LOCK_NOTIFY); return FALSE; } @@ -442,7 +441,7 @@ g_object_notify_queue_add (GObject *object, nqueue->n_pspecs++; } - G_UNLOCK(notify_lock); + object_bit_unlock (object, OPTIONAL_BIT_LOCK_NOTIFY); return TRUE; } From ab202a2c1cc33570f1fb104c17c5a603b4a50cda Mon Sep 17 00:00:00 2001 From: Thomas Haller Date: Wed, 20 Dec 2023 09:55:32 +0100 Subject: [PATCH 08/10] gobject: use per-object bit-lock instead of global mutex for toggle refs --- gobject/gobject.c | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/gobject/gobject.c b/gobject/gobject.c index 226f820fc..5a72a867b 100644 --- a/gobject/gobject.c +++ b/gobject/gobject.c @@ -125,6 +125,7 @@ enum { * (per-object!) is not worth it. 
*/ #define OPTIONAL_BIT_LOCK_WEAK_REFS 1 #define OPTIONAL_BIT_LOCK_NOTIFY 2 +#define OPTIONAL_BIT_LOCK_TOGGLE_REFS 3 #if SIZEOF_INT == 4 && GLIB_SIZEOF_VOID_P == 8 #define HAVE_OPTIONAL_FLAGS_IN_GOBJECT 1 @@ -221,7 +222,6 @@ struct _GObjectNotifyQueue /* --- variables --- */ G_LOCK_DEFINE_STATIC (closure_array_mutex); -G_LOCK_DEFINE_STATIC (toggle_refs_mutex); static GQuark quark_closure_array = 0; static GQuark quark_weak_notifies = 0; static GQuark quark_toggle_refs = 0; @@ -3662,7 +3662,7 @@ g_object_add_toggle_ref (GObject *object, g_object_ref (object); - G_LOCK (toggle_refs_mutex); + object_bit_lock (object, OPTIONAL_BIT_LOCK_TOGGLE_REFS); tstack = g_datalist_id_remove_no_notify (&object->qdata, quark_toggle_refs); if (tstack) { @@ -3686,7 +3686,7 @@ g_object_add_toggle_ref (GObject *object, tstack->toggle_refs[i].data = data; g_datalist_id_set_data_full (&object->qdata, quark_toggle_refs, tstack, (GDestroyNotify)g_free); - G_UNLOCK (toggle_refs_mutex); + object_bit_unlock (object, OPTIONAL_BIT_LOCK_TOGGLE_REFS); } /** @@ -3719,7 +3719,7 @@ g_object_remove_toggle_ref (GObject *object, g_return_if_fail (G_IS_OBJECT (object)); g_return_if_fail (notify != NULL); - G_LOCK (toggle_refs_mutex); + object_bit_lock (object, OPTIONAL_BIT_LOCK_TOGGLE_REFS); tstack = g_datalist_id_get_data (&object->qdata, quark_toggle_refs); if (tstack) { @@ -3743,7 +3743,7 @@ g_object_remove_toggle_ref (GObject *object, break; } } - G_UNLOCK (toggle_refs_mutex); + object_bit_unlock (object, OPTIONAL_BIT_LOCK_TOGGLE_REFS); if (found_one) g_object_unref (object); @@ -3791,11 +3791,11 @@ retry: gboolean do_retry; /* With ref count 1, check whether we need to emit a toggle notification. */ - G_LOCK (toggle_refs_mutex); + object_bit_lock (object, OPTIONAL_BIT_LOCK_TOGGLE_REFS); toggle_notify = toggle_refs_get_notify_unlocked (object, &toggle_data); do_retry = !g_atomic_int_compare_and_exchange_full ((int *) &object->ref_count, old_ref, old_ref + 1, &old_ref); - G_UNLOCK (toggle_refs_mutex); + object_bit_unlock (object, OPTIONAL_BIT_LOCK_TOGGLE_REFS); if (do_retry) goto retry; } @@ -3960,18 +3960,18 @@ retry_beginning: * * We need to take a lock, to avoid races. */ - G_LOCK (toggle_refs_mutex); + object_bit_lock (object, OPTIONAL_BIT_LOCK_TOGGLE_REFS); toggle_notify = toggle_refs_get_notify_unlocked (object, &toggle_data); if (!g_atomic_int_compare_and_exchange_full ((int *) &object->ref_count, old_ref, old_ref - 1, &old_ref)) { - G_UNLOCK (toggle_refs_mutex); + object_bit_unlock (object, OPTIONAL_BIT_LOCK_TOGGLE_REFS); goto retry_beginning; } - G_UNLOCK (toggle_refs_mutex); + object_bit_unlock (object, OPTIONAL_BIT_LOCK_TOGGLE_REFS); /* Beware: object might be a dangling pointer. */ TRACE (GOBJECT_OBJECT_UNREF (object, obj_gtype, old_ref)); @@ -4042,12 +4042,12 @@ retry_decrement: * notification. Take a lock and check for that. * * In that case, we need a lock to get the toggle notification. 
*/ - G_LOCK (toggle_refs_mutex); + object_bit_lock (object, OPTIONAL_BIT_LOCK_TOGGLE_REFS); toggle_notify = toggle_refs_get_notify_unlocked (object, &toggle_data); do_retry = !g_atomic_int_compare_and_exchange_full ((int *) &object->ref_count, old_ref, old_ref - 1, &old_ref); - G_UNLOCK (toggle_refs_mutex); + object_bit_unlock (object, OPTIONAL_BIT_LOCK_TOGGLE_REFS); if (do_retry) goto retry_decrement; From b397ef2122be81171cde2cf13c07e5b91e01b195 Mon Sep 17 00:00:00 2001 From: Thomas Haller Date: Wed, 20 Dec 2023 09:55:32 +0100 Subject: [PATCH 09/10] gobject: use per-object bit-lock instead of global mutex for closure array --- gobject/gobject.c | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/gobject/gobject.c b/gobject/gobject.c index 5a72a867b..aa3b851b0 100644 --- a/gobject/gobject.c +++ b/gobject/gobject.c @@ -126,6 +126,7 @@ enum { #define OPTIONAL_BIT_LOCK_WEAK_REFS 1 #define OPTIONAL_BIT_LOCK_NOTIFY 2 #define OPTIONAL_BIT_LOCK_TOGGLE_REFS 3 +#define OPTIONAL_BIT_LOCK_CLOSURE_ARRAY 4 #if SIZEOF_INT == 4 && GLIB_SIZEOF_VOID_P == 8 #define HAVE_OPTIONAL_FLAGS_IN_GOBJECT 1 @@ -221,7 +222,6 @@ struct _GObjectNotifyQueue }; /* --- variables --- */ -G_LOCK_DEFINE_STATIC (closure_array_mutex); static GQuark quark_closure_array = 0; static GQuark quark_weak_notifies = 0; static GQuark quark_toggle_refs = 0; @@ -4805,7 +4805,7 @@ object_remove_closure (gpointer data, CArray *carray; guint i; - G_LOCK (closure_array_mutex); + object_bit_lock (object, OPTIONAL_BIT_LOCK_CLOSURE_ARRAY); carray = g_object_get_qdata (object, quark_closure_array); for (i = 0; i < carray->n_closures; i++) if (carray->closures[i] == closure) @@ -4813,10 +4813,10 @@ object_remove_closure (gpointer data, carray->n_closures--; if (i < carray->n_closures) carray->closures[i] = carray->closures[carray->n_closures]; - G_UNLOCK (closure_array_mutex); + object_bit_unlock (object, OPTIONAL_BIT_LOCK_CLOSURE_ARRAY); return; } - G_UNLOCK (closure_array_mutex); + object_bit_unlock (object, OPTIONAL_BIT_LOCK_CLOSURE_ARRAY); g_assert_not_reached (); } @@ -4872,7 +4872,7 @@ g_object_watch_closure (GObject *object, g_closure_add_marshal_guards (closure, object, (GClosureNotify) g_object_ref, object, (GClosureNotify) g_object_unref); - G_LOCK (closure_array_mutex); + object_bit_lock (object, OPTIONAL_BIT_LOCK_CLOSURE_ARRAY); carray = g_datalist_id_remove_no_notify (&object->qdata, quark_closure_array); if (!carray) { @@ -4888,7 +4888,7 @@ g_object_watch_closure (GObject *object, } carray->closures[i] = closure; g_datalist_id_set_data_full (&object->qdata, quark_closure_array, carray, destroy_closure_array); - G_UNLOCK (closure_array_mutex); + object_bit_unlock (object, OPTIONAL_BIT_LOCK_CLOSURE_ARRAY); } /** From e05623001b0a945cd58981b9a3921ffbf7c0255b Mon Sep 17 00:00:00 2001 From: Thomas Haller Date: Sat, 23 Dec 2023 20:06:03 +0100 Subject: [PATCH 10/10] gobject: fix deadlock with g_weak_ref_set() In general, we must not call out to external, unknown code while holding a lock. That is prone to dead lock. g_object_ref() can emit a toggle notification. In g_weak_ref_set(), we must not do that while holding the lock. 
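A sketch of the problematic call sequence (this is how the deadlock from the
test added earlier in this series comes about):

    g_weak_ref_get (&weak)
      g_rw_lock_reader_lock (&weak_locations_lock)
      g_object_ref (object)                      /* refcount 1 -> 2 */
        toggle_notify (data, object, FALSE)      /* user callback, lock still held */
          g_weak_ref_set (&weak2, obj2)
            g_rw_lock_writer_lock (&weak_locations_lock)   /* deadlock */

g_weak_ref_set() has the same problem while holding the writer lock. Fix it by
taking the reference without notifying (object_ref() below) and invoking the
toggle notification only after the lock was released.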
--- gobject/gobject.c | 76 ++++++++++++++++++++++++++------------- gobject/tests/reference.c | 4 +-- 2 files changed, 53 insertions(+), 27 deletions(-) diff --git a/gobject/gobject.c b/gobject/gobject.c index aa3b851b0..10676c7fb 100644 --- a/gobject/gobject.c +++ b/gobject/gobject.c @@ -3751,33 +3751,25 @@ g_object_remove_toggle_ref (GObject *object, g_critical ("%s: couldn't find toggle ref %p(%p)", G_STRFUNC, notify, data); } -/** - * g_object_ref: - * @object: (type GObject.Object): a #GObject - * - * Increases the reference count of @object. - * - * Since GLib 2.56, if `GLIB_VERSION_MAX_ALLOWED` is 2.56 or greater, the type - * of @object will be propagated to the return type (using the GCC typeof() - * extension), so any casting the caller needs to do on the return type must be - * explicit. - * - * Returns: (type GObject.Object) (transfer none): the same @object - */ -gpointer -(g_object_ref) (gpointer _object) +/* Internal implementation of g_object_ref() which doesn't call out to user code. + * @out_toggle_notify and @out_toggle_data *must* be provided, and if non-`NULL` + * values are returned, then the caller *must* call that toggle notify function + * as soon as it is safe to do so. It may call (or be) user-provided code so should + * only be called once all locks are released. */ +static gpointer +object_ref (GObject *object, + GToggleNotify *out_toggle_notify, + gpointer *out_toggle_data) { - GObject *object = _object; GToggleNotify toggle_notify; gpointer toggle_data; gint old_ref; - g_return_val_if_fail (G_IS_OBJECT (object), NULL); - old_ref = g_atomic_int_get (&object->ref_count); retry: toggle_notify = NULL; + toggle_data = NULL; if (old_ref > 1 && old_ref < G_MAXINT) { /* Fast-path. We have apparently more than 1 references already. No @@ -3803,12 +3795,43 @@ retry: { gboolean object_already_finalized = TRUE; + *out_toggle_notify = NULL; + *out_toggle_data = NULL; g_return_val_if_fail (!object_already_finalized, NULL); return NULL; } TRACE (GOBJECT_OBJECT_REF (object, G_TYPE_FROM_INSTANCE (object), old_ref)); + *out_toggle_notify = toggle_notify; + *out_toggle_data = toggle_data; + return object; +} + +/** + * g_object_ref: + * @object: (type GObject.Object): a #GObject + * + * Increases the reference count of @object. + * + * Since GLib 2.56, if `GLIB_VERSION_MAX_ALLOWED` is 2.56 or greater, the type + * of @object will be propagated to the return type (using the GCC typeof() + * extension), so any casting the caller needs to do on the return type must be + * explicit. 
+ * + * Returns: (type GObject.Object) (transfer none): the same @object + */ +gpointer +(g_object_ref) (gpointer _object) +{ + GObject *object = _object; + GToggleNotify toggle_notify; + gpointer toggle_data; + + g_return_val_if_fail (G_IS_OBJECT (object), NULL); + + object = object_ref (object, &toggle_notify, &toggle_data); + if (toggle_notify) toggle_notify (toggle_data, object, FALSE); @@ -5113,20 +5136,25 @@ g_weak_ref_clear (GWeakRef *weak_ref) gpointer g_weak_ref_get (GWeakRef *weak_ref) { - gpointer object_or_null; + GToggleNotify toggle_notify = NULL; + gpointer toggle_data = NULL; + GObject *object; - g_return_val_if_fail (weak_ref!= NULL, NULL); + g_return_val_if_fail (weak_ref, NULL); g_rw_lock_reader_lock (&weak_locations_lock); - object_or_null = weak_ref->priv.p; + object = weak_ref->priv.p; - if (object_or_null != NULL) - g_object_ref (object_or_null); + if (object) + object = object_ref (object, &toggle_notify, &toggle_data); g_rw_lock_reader_unlock (&weak_locations_lock); - return object_or_null; + if (toggle_notify) + toggle_notify (toggle_data, object, FALSE); + + return object; } static void diff --git a/gobject/tests/reference.c b/gobject/tests/reference.c index 7b73eb725..1fbe7e7ea 100644 --- a/gobject/tests/reference.c +++ b/gobject/tests/reference.c @@ -731,11 +731,9 @@ weak_ref_in_toggle_notify_toggle_cb (gpointer data, return; /* We just got a second ref, while calling g_weak_ref_get(). - * At this point, we hold a lock for the weak ref. * - * FIXME: currently we would dead lock with the lines below. + * Test that taking another weak ref in this situation works. */ - return; g_weak_ref_init (&weak2, object); g_assert_true (object == g_weak_ref_get (&weak2));