Merge branch 'th/g-object-priv' into 'main'

[th/g-object-priv] add private data to GObject and use per-object locking

See merge request GNOME/glib!3774

commit f9e54fc991
@@ -321,4 +321,10 @@ GLibPrivateVTable *glib__private__ (void);
 gboolean g_uint_equal (gconstpointer v1, gconstpointer v2);
 guint g_uint_hash (gconstpointer v);
 
+#if defined(__GNUC__)
+#define G_THREAD_LOCAL __thread
+#else
+#undef G_THREAD_LOCAL
+#endif
+
 #endif /* __GLIB_PRIVATE_H__ */
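Note: G_THREAD_LOCAL wraps the GCC-style __thread storage class and stays undefined on compilers without that support, so code can #ifdef on it. A minimal stand-alone sketch of what thread-local storage provides (hypothetical names, not part of the patch; assumes a GCC-compatible compiler):

    /* Each thread sees its own copy of a MY_THREAD_LOCAL variable. */
    #include <glib.h>

    #if defined(__GNUC__)
    #define MY_THREAD_LOCAL __thread    /* stand-in for G_THREAD_LOCAL */
    #endif

    #ifdef MY_THREAD_LOCAL
    static MY_THREAD_LOCAL guint counter;

    static gpointer
    bump (gpointer data)
    {
      counter++;                          /* touches this thread's copy only */
      g_assert_cmpuint (counter, ==, 1);
      return NULL;
    }

    int
    main (void)
    {
      GThread *t1 = g_thread_new ("t1", bump, NULL);
      GThread *t2 = g_thread_new ("t2", bump, NULL);

      g_thread_join (t1);
      g_thread_join (t2);
      g_assert_cmpuint (counter, ==, 0);  /* main thread's copy was never touched */
      return 0;
    }
    #else
    int main (void) { return 0; }
    #endif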
@@ -102,12 +102,51 @@ enum {
   PROP_NONE
 };
 
+#define _OPTIONAL_BIT_LOCK 3
+
 #define OPTIONAL_FLAG_IN_CONSTRUCTION (1 << 0)
 #define OPTIONAL_FLAG_HAS_SIGNAL_HANDLER (1 << 1) /* Set if object ever had a signal handler */
 #define OPTIONAL_FLAG_HAS_NOTIFY_HANDLER (1 << 2) /* Same, specifically for "notify" */
+#define OPTIONAL_FLAG_LOCK (1 << 3) /* _OPTIONAL_BIT_LOCK */
+
+/* We use g_bit_lock(), which only supports one lock per integer.
+ *
+ * Hence, while we have locks for different purposes, internally they all
+ * map to the same bit lock (_OPTIONAL_BIT_LOCK).
+ *
+ * This means you cannot take a lock (object_bit_lock()) while already holding
+ * another bit lock. There is an assert against that with G_ENABLE_DEBUG
+ * builds (_object_bit_is_locked).
+ *
+ * In the past, we had different global mutexes per topic. Now we have one
+ * per-object mutex for several topics. The downside is that we are not as
+ * parallel as possible. The alternative would be to add individual locking
+ * integers to GObjectPrivate. But increasing memory usage for more parallelism
+ * (per-object!) is not worth it. */
+#define OPTIONAL_BIT_LOCK_WEAK_REFS 1
+#define OPTIONAL_BIT_LOCK_NOTIFY 2
+#define OPTIONAL_BIT_LOCK_TOGGLE_REFS 3
+#define OPTIONAL_BIT_LOCK_CLOSURE_ARRAY 4
+
 #if SIZEOF_INT == 4 && GLIB_SIZEOF_VOID_P == 8
-#define HAVE_OPTIONAL_FLAGS
+#define HAVE_OPTIONAL_FLAGS_IN_GOBJECT 1
+#else
+#define HAVE_OPTIONAL_FLAGS_IN_GOBJECT 0
+#endif
+
+/* For now we only create a private struct if we don't have optional flags in
+ * GObject. Currently we don't need it otherwise. In the future we might
+ * always add a private struct. */
+#define HAVE_PRIVATE (!HAVE_OPTIONAL_FLAGS_IN_GOBJECT)
+
+#if HAVE_PRIVATE
+typedef struct {
+#if !HAVE_OPTIONAL_FLAGS_IN_GOBJECT
+  guint optional_flags; /* (atomic) */
+#endif
+} GObjectPrivate;
+
+static int GObject_private_offset;
 #endif
 
 typedef struct
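Note: these defines pack the atomic flag bits (bits 0 to 2) and one g_bit_lock() bit (bit 3, OPTIONAL_FLAG_LOCK alias _OPTIONAL_BIT_LOCK) into the same integer; the four OPTIONAL_BIT_LOCK_* topics all contend on that single bit and are only distinct values for the debug assertions. A small self-contained sketch of the idea, with illustrative names only:

    /* One guint carrying both atomic flag bits and a g_bit_lock() bit. */
    #include <glib.h>

    #define DEMO_BIT_LOCK 3          /* like _OPTIONAL_BIT_LOCK */
    #define DEMO_FLAG_A   (1 << 0)   /* like OPTIONAL_FLAG_IN_CONSTRUCTION */

    static guint flags_word;         /* stands in for GObject.optional_flags */

    int
    main (void)
    {
      /* Lock bit 3 of the word; the flag bits stay usable atomically. */
      g_bit_lock ((gint *) &flags_word, DEMO_BIT_LOCK);
      g_atomic_int_or (&flags_word, DEMO_FLAG_A);
      g_bit_unlock ((gint *) &flags_word, DEMO_BIT_LOCK);

      g_assert (g_atomic_int_get (&flags_word) & DEMO_FLAG_A);
      return 0;
    }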
@@ -116,7 +155,7 @@ typedef struct
 
   /*< private >*/
   guint ref_count; /* (atomic) */
-#ifdef HAVE_OPTIONAL_FLAGS
+#if HAVE_OPTIONAL_FLAGS_IN_GOBJECT
   guint optional_flags; /* (atomic) */
 #endif
   GData *qdata;
@@ -183,16 +222,10 @@ struct _GObjectNotifyQueue
 };
 
 /* --- variables --- */
-G_LOCK_DEFINE_STATIC (closure_array_mutex);
-G_LOCK_DEFINE_STATIC (weak_refs_mutex);
-G_LOCK_DEFINE_STATIC (toggle_refs_mutex);
 static GQuark quark_closure_array = 0;
 static GQuark quark_weak_notifies = 0;
 static GQuark quark_toggle_refs = 0;
 static GQuark quark_notify_queue;
-#ifndef HAVE_OPTIONAL_FLAGS
-static GQuark quark_in_construction;
-#endif
 static GParamSpecPool *pspec_pool = NULL;
 static gulong gobject_signals[LAST_SIGNAL] = { 0, };
 static guint (*floating_flag_handler) (GObject*, gint) = object_floating_flag_handler;
@@ -200,7 +233,60 @@ static guint (*floating_flag_handler) (GObject*, gint) = object_floating_flag_handler;
 static GQuark quark_weak_locations = 0;
 static GRWLock weak_locations_lock;
 
-G_LOCK_DEFINE_STATIC(notify_lock);
+#if HAVE_PRIVATE
+G_ALWAYS_INLINE static inline GObjectPrivate *
+g_object_get_instance_private (GObject *object)
+{
+  return G_STRUCT_MEMBER_P (object, GObject_private_offset);
+}
+#endif
+
+G_ALWAYS_INLINE static inline guint *
+object_get_optional_flags_p (GObject *object)
+{
+#if HAVE_OPTIONAL_FLAGS_IN_GOBJECT
+  return &(((GObjectReal *) object)->optional_flags);
+#else
+  return &g_object_get_instance_private (object)->optional_flags;
+#endif
+}
+
+#if defined(G_ENABLE_DEBUG) && defined(G_THREAD_LOCAL)
+/* Using this thread-local global is sufficient to guard the per-object
+ * locking, because while the current thread holds a lock on one object, it
+ * never calls out to another object (because doing so would be prone to
+ * deadlock). */
+static G_THREAD_LOCAL guint _object_bit_is_locked;
+#endif
+
+static void
+object_bit_lock (GObject *object, guint lock_bit)
+{
+#if defined(G_ENABLE_DEBUG) && defined(G_THREAD_LOCAL)
+  /* all object_bit_lock() really use the same bit/mutex. The "lock_bit" argument
+   * only exists for asserting. object_bit_lock() is not re-entrant (also not with
+   * different "lock_bit" values). */
+  g_assert (lock_bit > 0);
+  g_assert (_object_bit_is_locked == 0);
+  _object_bit_is_locked = lock_bit;
+#endif
+
+  g_bit_lock ((gint *) object_get_optional_flags_p (object), _OPTIONAL_BIT_LOCK);
+}
+
+static void
+object_bit_unlock (GObject *object, guint lock_bit)
+{
+#if defined(G_ENABLE_DEBUG) && defined(G_THREAD_LOCAL)
+  /* All lock_bit map to the same mutex. We cannot use two different locks on
+   * the same integer. Assert against that. */
+  g_assert (lock_bit > 0);
+  g_assert (_object_bit_is_locked == lock_bit);
+  _object_bit_is_locked = 0;
+#endif
+
+  g_bit_unlock ((gint *) object_get_optional_flags_p (object), _OPTIONAL_BIT_LOCK);
+}
+
 /* --- functions --- */
 static void
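Note: because every OPTIONAL_BIT_LOCK_* topic maps to the same bit, object_bit_lock() must never nest, and on G_ENABLE_DEBUG builds the thread-local _object_bit_is_locked turns an accidental nested lock into an assertion failure instead of a deadlock. The guard pattern in isolation looks roughly like this (a sketch with made-up names, assuming __thread is available):

    #include <glib.h>

    static __thread guint currently_held;  /* 0 = no topic lock held on this thread */
    static gint lock_word;

    static void
    topic_lock (guint topic)
    {
      g_assert (topic > 0);
      g_assert (currently_held == 0);      /* nesting would deadlock: catch it here */
      currently_held = topic;
      g_bit_lock (&lock_word, 3);
    }

    static void
    topic_unlock (guint topic)
    {
      g_assert (currently_held == topic);  /* unlock must match the lock taken */
      currently_held = 0;
      g_bit_unlock (&lock_word, 3);
    }

    int
    main (void)
    {
      topic_lock (1);
      /* topic_lock (2) here would assert rather than hang. */
      topic_unlock (1);
      return 0;
    }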
@@ -234,7 +320,7 @@ g_object_notify_queue_freeze (GObject *object)
 {
   GObjectNotifyQueue *nqueue;
 
-  G_LOCK(notify_lock);
+  object_bit_lock (object, OPTIONAL_BIT_LOCK_NOTIFY);
   nqueue = g_datalist_id_get_data (&object->qdata, quark_notify_queue);
   if (!nqueue)
     {
@@ -251,7 +337,7 @@ g_object_notify_queue_freeze (GObject *object)
   nqueue->freeze_count++;
 
 out:
-  G_UNLOCK(notify_lock);
+  object_bit_unlock (object, OPTIONAL_BIT_LOCK_NOTIFY);
 
   return nqueue;
 }
@@ -265,7 +351,7 @@ g_object_notify_queue_thaw (GObject *object,
   GSList *slist;
   guint n_pspecs = 0;
 
-  G_LOCK(notify_lock);
+  object_bit_lock (object, OPTIONAL_BIT_LOCK_NOTIFY);
 
   if (!nqueue)
     {
@@ -276,7 +362,7 @@ g_object_notify_queue_thaw (GObject *object,
   /* Just make sure we never get into some nasty race condition */
   if (G_UNLIKELY (!nqueue || nqueue->freeze_count == 0))
     {
-      G_UNLOCK (notify_lock);
+      object_bit_unlock (object, OPTIONAL_BIT_LOCK_NOTIFY);
       g_critical ("%s: property-changed notification for %s(%p) is not frozen",
                   G_STRFUNC, G_OBJECT_TYPE_NAME (object), object);
       return;
@@ -285,7 +371,7 @@ g_object_notify_queue_thaw (GObject *object,
   nqueue->freeze_count--;
   if (nqueue->freeze_count)
     {
-      G_UNLOCK (notify_lock);
+      object_bit_unlock (object, OPTIONAL_BIT_LOCK_NOTIFY);
       return;
     }
 
@@ -297,7 +383,7 @@ g_object_notify_queue_thaw (GObject *object,
     }
   g_datalist_id_set_data (&object->qdata, quark_notify_queue, NULL);
 
-  G_UNLOCK(notify_lock);
+  object_bit_unlock (object, OPTIONAL_BIT_LOCK_NOTIFY);
 
   if (n_pspecs)
     {
@@ -318,7 +404,7 @@ g_object_notify_queue_add (GObject *object,
                            GParamSpec *pspec,
                            gboolean in_init)
 {
-  G_LOCK(notify_lock);
+  object_bit_lock (object, OPTIONAL_BIT_LOCK_NOTIFY);
 
   if (!nqueue)
     {
@@ -332,7 +418,7 @@ g_object_notify_queue_add (GObject *object,
         {
           /* We don't have a notify queue and are not in_init. The event
            * is not to be queued. The caller will dispatch directly. */
-          G_UNLOCK (notify_lock);
+          object_bit_unlock (object, OPTIONAL_BIT_LOCK_NOTIFY);
           return FALSE;
         }
 
@@ -355,7 +441,7 @@ g_object_notify_queue_add (GObject *object,
       nqueue->n_pspecs++;
     }
 
-  G_UNLOCK(notify_lock);
+  object_bit_unlock (object, OPTIONAL_BIT_LOCK_NOTIFY);
 
   return TRUE;
 }
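Note: the queue helpers above back g_object_freeze_notify() and g_object_thaw_notify(); these hunks only swap the global notify_lock for the per-object bit lock, the queueing behaviour itself is unchanged. For reference, a stand-alone illustration of that behaviour; the DemoObject type below is hypothetical and exists only for this sketch:

    #include <glib-object.h>

    typedef struct { GObject parent_instance; int count; } DemoObject;
    typedef struct { GObjectClass parent_class; } DemoObjectClass;

    G_DEFINE_TYPE (DemoObject, demo_object, G_TYPE_OBJECT)

    enum { PROP_0, PROP_COUNT };

    static void
    demo_object_set_property (GObject *object, guint prop_id,
                              const GValue *value, GParamSpec *pspec)
    {
      ((DemoObject *) object)->count = g_value_get_int (value);
    }

    static void
    demo_object_get_property (GObject *object, guint prop_id,
                              GValue *value, GParamSpec *pspec)
    {
      g_value_set_int (value, ((DemoObject *) object)->count);
    }

    static void
    demo_object_class_init (DemoObjectClass *klass)
    {
      GObjectClass *object_class = G_OBJECT_CLASS (klass);

      object_class->set_property = demo_object_set_property;
      object_class->get_property = demo_object_get_property;
      g_object_class_install_property (object_class, PROP_COUNT,
          g_param_spec_int ("count", "Count", "A demo counter",
                            0, 100, 0, G_PARAM_READWRITE));
    }

    static void
    demo_object_init (DemoObject *self)
    {
    }

    static void
    on_notify (GObject *obj, GParamSpec *pspec, gpointer user_data)
    {
      (*(int *) user_data)++;
    }

    int
    main (void)
    {
      DemoObject *obj = g_object_new (demo_object_get_type (), NULL);
      int emissions = 0;

      g_signal_connect (obj, "notify::count", G_CALLBACK (on_notify), &emissions);

      g_object_freeze_notify (G_OBJECT (obj));  /* queue is created under the per-object lock */
      g_object_set (obj, "count", 1, NULL);
      g_object_set (obj, "count", 2, NULL);     /* duplicate pspec is queued only once */
      g_object_thaw_notify (G_OBJECT (obj));    /* single "notify::count" emission here */

      g_assert_cmpint (emissions, ==, 1);

      g_object_unref (obj);
      return 0;
    }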
@@ -455,6 +541,11 @@ _g_object_type_init (void)
 # endif /* G_HAS_CONSTRUCTORS */
 }
 #endif /* G_ENABLE_DEBUG */
+
+#if HAVE_PRIVATE
+  GObject_private_offset =
+      g_type_add_instance_private (G_TYPE_OBJECT, sizeof (GObjectPrivate));
+#endif
 }
 
 /* Initialize the global GParamSpecPool; this function needs to be
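Note: g_type_add_instance_private() reserves room for GObjectPrivate when G_TYPE_OBJECT is registered, and g_type_class_adjust_private_offset() in the class-init hunk further down fixes up the final offset; this is the manual form of what G_DEFINE_TYPE_WITH_PRIVATE arranges for ordinary types, spelled out here because G_TYPE_OBJECT is registered by hand. A sketch of the same mechanism through the convenience macro (the DemoPriv type is hypothetical):

    #include <glib-object.h>

    typedef struct { GObject parent_instance; } DemoPriv;
    typedef struct { GObjectClass parent_class; } DemoPrivClass;
    typedef struct { guint optional_flags; } DemoPrivPrivate;

    G_DEFINE_TYPE_WITH_PRIVATE (DemoPriv, demo_priv, G_TYPE_OBJECT)

    static void demo_priv_class_init (DemoPrivClass *klass) { }
    static void demo_priv_init (DemoPriv *self) { }

    int
    main (void)
    {
      DemoPriv *obj = g_object_new (demo_priv_get_type (), NULL);
      DemoPrivPrivate *priv = demo_priv_get_instance_private (obj);

      priv->optional_flags = 1;   /* lives outside the public instance struct */
      g_assert_cmpuint (priv->optional_flags, ==, 1);

      g_object_unref (obj);
      return 0;
    }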
@@ -529,9 +620,6 @@ g_object_do_class_init (GObjectClass *class)
   quark_weak_locations = g_quark_from_static_string ("GObject-weak-locations");
   quark_toggle_refs = g_quark_from_static_string ("GObject-toggle-references");
   quark_notify_queue = g_quark_from_static_string ("GObject-notify-queue");
-#ifndef HAVE_OPTIONAL_FLAGS
-  quark_in_construction = g_quark_from_static_string ("GObject-in-construction");
-#endif
 
   g_object_init_pspec_pool ();
 
@@ -589,6 +677,10 @@ g_object_do_class_init (GObjectClass *class)
    * implement an interface implement all properties for that interface
    */
   g_type_add_interface_check (NULL, object_interface_check_properties);
+
+#if HAVE_PRIVATE
+  g_type_class_adjust_private_offset (class, &GObject_private_offset);
+#endif
 }
 
 /* Sinks @pspec if it’s a floating ref. */
@@ -1183,133 +1275,62 @@ g_object_interface_list_properties (gpointer g_iface,
 static inline guint
 object_get_optional_flags (GObject *object)
 {
-#ifdef HAVE_OPTIONAL_FLAGS
-  GObjectReal *real = (GObjectReal *)object;
-
-  return (guint)g_atomic_int_get (&real->optional_flags);
-#else
-  return 0;
-#endif
+  return g_atomic_int_get (object_get_optional_flags_p (object));
 }
 
-/* Variant of object_get_optional_flags for when
- * we know that we have exclusive access (during
- * construction)
- */
-static inline guint
-object_get_optional_flags_X (GObject *object)
-{
-#ifdef HAVE_OPTIONAL_FLAGS
-  GObjectReal *real = (GObjectReal *)object;
-  return real->optional_flags;
-#else
-  return 0;
-#endif
-}
-
-#ifdef HAVE_OPTIONAL_FLAGS
 static inline void
 object_set_optional_flags (GObject *object,
                            guint flags)
 {
-  GObjectReal *real = (GObjectReal *)object;
-  g_atomic_int_or (&real->optional_flags, flags);
+  g_atomic_int_or (object_get_optional_flags_p (object), flags);
 }
 
-/* Variant for when we have exclusive access
- * (during construction)
- */
 static inline void
-object_set_optional_flags_X (GObject *object,
-                             guint flags)
-{
-  GObjectReal *real = (GObjectReal *)object;
-  real->optional_flags |= flags;
-}
-
-/* Variant for when we have exclusive access
- * (during construction)
- */
-static inline void
-object_unset_optional_flags_X (GObject *object,
+object_unset_optional_flags (GObject *object,
                              guint flags)
 {
-  GObjectReal *real = (GObjectReal *)object;
-  real->optional_flags &= ~flags;
+  g_atomic_int_and (object_get_optional_flags_p (object), ~flags);
 }
-#endif
 
 gboolean
 _g_object_has_signal_handler (GObject *object)
 {
-#ifdef HAVE_OPTIONAL_FLAGS
   return (object_get_optional_flags (object) & OPTIONAL_FLAG_HAS_SIGNAL_HANDLER) != 0;
-#else
-  return TRUE;
-#endif
 }
 
 static inline gboolean
 _g_object_has_notify_handler (GObject *object)
 {
-#ifdef HAVE_OPTIONAL_FLAGS
   return CLASS_NEEDS_NOTIFY (G_OBJECT_GET_CLASS (object)) ||
          (object_get_optional_flags (object) & OPTIONAL_FLAG_HAS_NOTIFY_HANDLER) != 0;
-#else
-  return TRUE;
-#endif
-}
-
-static inline gboolean
-_g_object_has_notify_handler_X (GObject *object)
-{
-#ifdef HAVE_OPTIONAL_FLAGS
-  return CLASS_NEEDS_NOTIFY (G_OBJECT_GET_CLASS (object)) ||
-         (object_get_optional_flags_X (object) & OPTIONAL_FLAG_HAS_NOTIFY_HANDLER) != 0;
-#else
-  return TRUE;
-#endif
 }
 
 void
 _g_object_set_has_signal_handler (GObject *object,
                                   guint signal_id)
 {
-#ifdef HAVE_OPTIONAL_FLAGS
   guint flags = OPTIONAL_FLAG_HAS_SIGNAL_HANDLER;
   if (signal_id == gobject_signals[NOTIFY])
     flags |= OPTIONAL_FLAG_HAS_NOTIFY_HANDLER;
   object_set_optional_flags (object, flags);
-#endif
 }
 
 static inline gboolean
 object_in_construction (GObject *object)
 {
-#ifdef HAVE_OPTIONAL_FLAGS
   return (object_get_optional_flags (object) & OPTIONAL_FLAG_IN_CONSTRUCTION) != 0;
-#else
-  return g_datalist_id_get_data (&object->qdata, quark_in_construction) != NULL;
-#endif
 }
 
 static inline void
 set_object_in_construction (GObject *object)
 {
-#ifdef HAVE_OPTIONAL_FLAGS
-  object_set_optional_flags_X (object, OPTIONAL_FLAG_IN_CONSTRUCTION);
-#else
-  g_datalist_id_set_data (&object->qdata, quark_in_construction, object);
-#endif
+  object_set_optional_flags (object, OPTIONAL_FLAG_IN_CONSTRUCTION);
 }
 
 static inline void
 unset_object_in_construction (GObject *object)
 {
-#ifdef HAVE_OPTIONAL_FLAGS
-  object_unset_optional_flags_X (object, OPTIONAL_FLAG_IN_CONSTRUCTION);
-#else
-  g_datalist_id_set_data (&object->qdata, quark_in_construction, NULL);
-#endif
+  object_unset_optional_flags (object, OPTIONAL_FLAG_IN_CONSTRUCTION);
 }
 
 static void
@@ -1498,9 +1519,7 @@ static inline void
 g_object_notify_by_spec_internal (GObject *object,
                                   GParamSpec *pspec)
 {
-#ifdef HAVE_OPTIONAL_FLAGS
   guint object_flags;
-#endif
   gboolean needs_notify;
   gboolean in_init;
 
@@ -1509,16 +1528,11 @@ g_object_notify_by_spec_internal (GObject *object,
 
   param_spec_follow_override (&pspec);
 
-#ifdef HAVE_OPTIONAL_FLAGS
   /* get all flags we need with a single atomic read */
   object_flags = object_get_optional_flags (object);
   needs_notify = ((object_flags & OPTIONAL_FLAG_HAS_NOTIFY_HANDLER) != 0) ||
                  CLASS_NEEDS_NOTIFY (G_OBJECT_GET_CLASS (object));
   in_init = (object_flags & OPTIONAL_FLAG_IN_CONSTRUCTION) != 0;
-#else
-  needs_notify = TRUE;
-  in_init = object_in_construction (object);
-#endif
 
   if (pspec != NULL && needs_notify)
     {
@@ -2178,7 +2192,7 @@ g_object_new_with_custom_constructor (GObjectClass *class,
 
   if (CLASS_HAS_PROPS (class))
     {
-      if ((newly_constructed && _g_object_has_notify_handler_X (object)) ||
+      if ((newly_constructed && _g_object_has_notify_handler (object)) ||
          _g_object_has_notify_handler (object))
         {
          /* This may or may not have been setup in g_object_init().
@@ -2228,7 +2242,7 @@ g_object_new_internal (GObjectClass *class,
     {
       GSList *node;
 
-      if (_g_object_has_notify_handler_X (object))
+      if (_g_object_has_notify_handler (object))
         {
          /* This may or may not have been setup in g_object_init().
           * If it hasn't, we do it now.
@@ -3291,7 +3305,7 @@ g_object_weak_ref (GObject *object,
   g_return_if_fail (notify != NULL);
   g_return_if_fail (g_atomic_int_get (&object->ref_count) >= 1);
 
-  G_LOCK (weak_refs_mutex);
+  object_bit_lock (object, OPTIONAL_BIT_LOCK_WEAK_REFS);
   wstack = g_datalist_id_remove_no_notify (&object->qdata, quark_weak_notifies);
   if (wstack)
     {
@@ -3308,7 +3322,7 @@ g_object_weak_ref (GObject *object,
   wstack->weak_refs[i].notify = notify;
   wstack->weak_refs[i].data = data;
   g_datalist_id_set_data_full (&object->qdata, quark_weak_notifies, wstack, weak_refs_notify);
-  G_UNLOCK (weak_refs_mutex);
+  object_bit_unlock (object, OPTIONAL_BIT_LOCK_WEAK_REFS);
 }
 
 /**
@@ -3330,7 +3344,7 @@ g_object_weak_unref (GObject *object,
   g_return_if_fail (G_IS_OBJECT (object));
   g_return_if_fail (notify != NULL);
 
-  G_LOCK (weak_refs_mutex);
+  object_bit_lock (object, OPTIONAL_BIT_LOCK_WEAK_REFS);
   wstack = g_datalist_id_get_data (&object->qdata, quark_weak_notifies);
   if (wstack)
     {
@@ -3348,7 +3362,7 @@ g_object_weak_unref (GObject *object,
           break;
         }
     }
-  G_UNLOCK (weak_refs_mutex);
+  object_bit_unlock (object, OPTIONAL_BIT_LOCK_WEAK_REFS);
   if (!found_one)
     g_critical ("%s: couldn't find weak ref %p(%p)", G_STRFUNC, notify, data);
 }
@@ -3648,7 +3662,7 @@ g_object_add_toggle_ref (GObject *object,
 
   g_object_ref (object);
 
-  G_LOCK (toggle_refs_mutex);
+  object_bit_lock (object, OPTIONAL_BIT_LOCK_TOGGLE_REFS);
   tstack = g_datalist_id_remove_no_notify (&object->qdata, quark_toggle_refs);
   if (tstack)
     {
@@ -3672,7 +3686,7 @@ g_object_add_toggle_ref (GObject *object,
   tstack->toggle_refs[i].data = data;
   g_datalist_id_set_data_full (&object->qdata, quark_toggle_refs, tstack,
                                (GDestroyNotify)g_free);
-  G_UNLOCK (toggle_refs_mutex);
+  object_bit_unlock (object, OPTIONAL_BIT_LOCK_TOGGLE_REFS);
 }
 
 /**
@@ -3705,7 +3719,7 @@ g_object_remove_toggle_ref (GObject *object,
   g_return_if_fail (G_IS_OBJECT (object));
   g_return_if_fail (notify != NULL);
 
-  G_LOCK (toggle_refs_mutex);
+  object_bit_lock (object, OPTIONAL_BIT_LOCK_TOGGLE_REFS);
   tstack = g_datalist_id_get_data (&object->qdata, quark_toggle_refs);
   if (tstack)
     {
@@ -3729,7 +3743,7 @@ g_object_remove_toggle_ref (GObject *object,
           break;
         }
     }
-  G_UNLOCK (toggle_refs_mutex);
+  object_bit_unlock (object, OPTIONAL_BIT_LOCK_TOGGLE_REFS);
 
   if (found_one)
     g_object_unref (object);
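Note: the hunks above move g_object_weak_ref()/g_object_weak_unref() and the toggle-ref bookkeeping from the global weak_refs_mutex and toggle_refs_mutex onto the per-object bit lock; the public contract is untouched. For context, the weak-notify API they protect (stand-alone example, not from the patch):

    #include <glib-object.h>

    static void
    on_finalized (gpointer user_data, GObject *where_the_object_was)
    {
      *(gboolean *) user_data = TRUE;
    }

    int
    main (void)
    {
      GObject *obj = g_object_new (G_TYPE_OBJECT, NULL);
      gboolean fired = FALSE;

      g_object_weak_ref (obj, on_finalized, &fired);   /* registration takes the per-object lock */
      g_object_unref (obj);                            /* last ref: weak notify runs */

      g_assert_true (fired);
      return 0;
    }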
@@ -3737,6 +3751,63 @@ g_object_remove_toggle_ref (GObject *object,
     g_critical ("%s: couldn't find toggle ref %p(%p)", G_STRFUNC, notify, data);
 }
 
+/* Internal implementation of g_object_ref() which doesn't call out to user code.
+ * @out_toggle_notify and @out_toggle_data *must* be provided, and if non-`NULL`
+ * values are returned, then the caller *must* call that toggle notify function
+ * as soon as it is safe to do so. It may call (or be) user-provided code so should
+ * only be called once all locks are released. */
+static gpointer
+object_ref (GObject *object,
+            GToggleNotify *out_toggle_notify,
+            gpointer *out_toggle_data)
+{
+  GToggleNotify toggle_notify;
+  gpointer toggle_data;
+  gint old_ref;
+
+  old_ref = g_atomic_int_get (&object->ref_count);
+
+retry:
+  toggle_notify = NULL;
+  toggle_data = NULL;
+  if (old_ref > 1 && old_ref < G_MAXINT)
+    {
+      /* Fast-path. We have apparently more than 1 references already. No
+       * special handling for toggle references, just increment the ref count. */
+      if (!g_atomic_int_compare_and_exchange_full ((int *) &object->ref_count,
+                                                   old_ref, old_ref + 1, &old_ref))
+        goto retry;
+    }
+  else if (old_ref == 1)
+    {
+      gboolean do_retry;
+
+      /* With ref count 1, check whether we need to emit a toggle notification. */
+      object_bit_lock (object, OPTIONAL_BIT_LOCK_TOGGLE_REFS);
+      toggle_notify = toggle_refs_get_notify_unlocked (object, &toggle_data);
+      do_retry = !g_atomic_int_compare_and_exchange_full ((int *) &object->ref_count,
+                                                          old_ref, old_ref + 1, &old_ref);
+      object_bit_unlock (object, OPTIONAL_BIT_LOCK_TOGGLE_REFS);
+      if (do_retry)
+        goto retry;
+    }
+  else
+    {
+      gboolean object_already_finalized = TRUE;
+
+      *out_toggle_notify = NULL;
+      *out_toggle_data = NULL;
+      g_return_val_if_fail (!object_already_finalized, NULL);
+      return NULL;
+    }
+
+  TRACE (GOBJECT_OBJECT_REF (object, G_TYPE_FROM_INSTANCE (object), old_ref));
+
+  *out_toggle_notify = toggle_notify;
+  *out_toggle_data = toggle_data;
+  return object;
+}
+
 /**
  * g_object_ref:
  * @object: (type GObject.Object): a #GObject
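Note: the new object_ref() helper centralises the compare-and-exchange retry loop and hands any pending toggle notification back to the caller instead of invoking it, so user code only runs after every lock is released; g_weak_ref_get() below relies on exactly that. The retry idiom on its own, as a sketch with a plain counter rather than GObject's real ref count (g_atomic_int_compare_and_exchange_full() needs a reasonably recent GLib):

    #include <glib.h>

    static gint ref_count = 1;

    static gboolean
    demo_ref (void)
    {
      gint old_ref = g_atomic_int_get (&ref_count);

    retry:
      if (old_ref <= 0 || old_ref == G_MAXINT)
        return FALSE;                       /* refuse to resurrect or overflow */

      /* On failure, old_ref is refreshed with the value seen in memory. */
      if (!g_atomic_int_compare_and_exchange_full (&ref_count, old_ref,
                                                   old_ref + 1, &old_ref))
        goto retry;

      return TRUE;
    }

    int
    main (void)
    {
      g_assert_true (demo_ref ());
      g_assert_cmpint (g_atomic_int_get (&ref_count), ==, 2);
      return 0;
    }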
@@ -3756,44 +3827,10 @@ gpointer
   GObject *object = _object;
   GToggleNotify toggle_notify;
   gpointer toggle_data;
-  gint old_ref;
 
   g_return_val_if_fail (G_IS_OBJECT (object), NULL);
 
-  old_ref = g_atomic_int_get (&object->ref_count);
+  object = object_ref (object, &toggle_notify, &toggle_data);
 
-retry:
-  toggle_notify = NULL;
-  if (old_ref > 1 && old_ref < G_MAXINT)
-    {
-      /* Fast-path. We have apparently more than 1 references already. No
-       * special handling for toggle references, just increment the ref count. */
-      if (!g_atomic_int_compare_and_exchange_full ((int *) &object->ref_count,
-                                                   old_ref, old_ref + 1, &old_ref))
-        goto retry;
-    }
-  else if (old_ref == 1)
-    {
-      gboolean do_retry;
-
-      /* With ref count 1, check whether we need to emit a toggle notification. */
-      G_LOCK (toggle_refs_mutex);
-      toggle_notify = toggle_refs_get_notify_unlocked (object, &toggle_data);
-      do_retry = !g_atomic_int_compare_and_exchange_full ((int *) &object->ref_count,
-                                                          old_ref, old_ref + 1, &old_ref);
-      G_UNLOCK (toggle_refs_mutex);
-      if (do_retry)
-        goto retry;
-    }
-  else
-    {
-      gboolean object_already_finalized = TRUE;
-
-      g_return_val_if_fail (!object_already_finalized, NULL);
-      return NULL;
-    }
-
-  TRACE (GOBJECT_OBJECT_REF (object, G_TYPE_FROM_INSTANCE (object), old_ref));
-
   if (toggle_notify)
     toggle_notify (toggle_data, object, FALSE);
@@ -3946,18 +3983,18 @@ retry_beginning:
    *
    * We need to take a lock, to avoid races. */
 
-  G_LOCK (toggle_refs_mutex);
+  object_bit_lock (object, OPTIONAL_BIT_LOCK_TOGGLE_REFS);
 
   toggle_notify = toggle_refs_get_notify_unlocked (object, &toggle_data);
 
   if (!g_atomic_int_compare_and_exchange_full ((int *) &object->ref_count,
                                                old_ref, old_ref - 1, &old_ref))
     {
-      G_UNLOCK (toggle_refs_mutex);
+      object_bit_unlock (object, OPTIONAL_BIT_LOCK_TOGGLE_REFS);
       goto retry_beginning;
     }
 
-  G_UNLOCK (toggle_refs_mutex);
+  object_bit_unlock (object, OPTIONAL_BIT_LOCK_TOGGLE_REFS);
 
   /* Beware: object might be a dangling pointer. */
   TRACE (GOBJECT_OBJECT_UNREF (object, obj_gtype, old_ref));
@@ -4028,12 +4065,12 @@ retry_decrement:
    * notification. Take a lock and check for that.
    *
    * In that case, we need a lock to get the toggle notification. */
-  G_LOCK (toggle_refs_mutex);
+  object_bit_lock (object, OPTIONAL_BIT_LOCK_TOGGLE_REFS);
   toggle_notify = toggle_refs_get_notify_unlocked (object, &toggle_data);
   do_retry = !g_atomic_int_compare_and_exchange_full ((int *) &object->ref_count,
                                                       old_ref, old_ref - 1,
                                                       &old_ref);
-  G_UNLOCK (toggle_refs_mutex);
+  object_bit_unlock (object, OPTIONAL_BIT_LOCK_TOGGLE_REFS);
 
   if (do_retry)
     goto retry_decrement;
@@ -4791,7 +4828,7 @@ object_remove_closure (gpointer data,
   CArray *carray;
   guint i;
 
-  G_LOCK (closure_array_mutex);
+  object_bit_lock (object, OPTIONAL_BIT_LOCK_CLOSURE_ARRAY);
   carray = g_object_get_qdata (object, quark_closure_array);
   for (i = 0; i < carray->n_closures; i++)
     if (carray->closures[i] == closure)
@@ -4799,10 +4836,10 @@ object_remove_closure (gpointer data,
         carray->n_closures--;
         if (i < carray->n_closures)
           carray->closures[i] = carray->closures[carray->n_closures];
-        G_UNLOCK (closure_array_mutex);
+        object_bit_unlock (object, OPTIONAL_BIT_LOCK_CLOSURE_ARRAY);
         return;
       }
-  G_UNLOCK (closure_array_mutex);
+  object_bit_unlock (object, OPTIONAL_BIT_LOCK_CLOSURE_ARRAY);
   g_assert_not_reached ();
 }
 
@@ -4858,7 +4895,7 @@ g_object_watch_closure (GObject *object,
   g_closure_add_marshal_guards (closure,
                                 object, (GClosureNotify) g_object_ref,
                                 object, (GClosureNotify) g_object_unref);
-  G_LOCK (closure_array_mutex);
+  object_bit_lock (object, OPTIONAL_BIT_LOCK_CLOSURE_ARRAY);
   carray = g_datalist_id_remove_no_notify (&object->qdata, quark_closure_array);
   if (!carray)
     {
@@ -4874,7 +4911,7 @@ g_object_watch_closure (GObject *object,
     }
   carray->closures[i] = closure;
   g_datalist_id_set_data_full (&object->qdata, quark_closure_array, carray, destroy_closure_array);
-  G_UNLOCK (closure_array_mutex);
+  object_bit_unlock (object, OPTIONAL_BIT_LOCK_CLOSURE_ARRAY);
 }
 
 /**
@@ -5099,20 +5136,25 @@ g_weak_ref_clear (GWeakRef *weak_ref)
 gpointer
 g_weak_ref_get (GWeakRef *weak_ref)
 {
-  gpointer object_or_null;
+  GToggleNotify toggle_notify = NULL;
+  gpointer toggle_data = NULL;
+  GObject *object;
 
-  g_return_val_if_fail (weak_ref != NULL, NULL);
+  g_return_val_if_fail (weak_ref, NULL);
 
   g_rw_lock_reader_lock (&weak_locations_lock);
 
-  object_or_null = weak_ref->priv.p;
+  object = weak_ref->priv.p;
 
-  if (object_or_null != NULL)
-    g_object_ref (object_or_null);
+  if (object)
+    object = object_ref (object, &toggle_notify, &toggle_data);
 
   g_rw_lock_reader_unlock (&weak_locations_lock);
 
-  return object_or_null;
+  if (toggle_notify)
+    toggle_notify (toggle_data, object, FALSE);
+
+  return object;
 }
 
 static void
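Note: g_weak_ref_get() used to call g_object_ref(), which could run a toggle notification (user code) while weak_locations_lock was still held; it now uses object_ref() and fires the returned notification only after dropping the reader lock. The caller-visible contract is unchanged, as a quick stand-alone reminder:

    /* GWeakRef basics: g_weak_ref_get() returns a new strong reference,
     * or NULL once the object is gone. */
    #include <glib-object.h>

    int
    main (void)
    {
      GObject *obj = g_object_new (G_TYPE_OBJECT, NULL);
      GWeakRef weak;
      GObject *again;

      g_weak_ref_init (&weak, obj);

      again = g_weak_ref_get (&weak);   /* strong ref taken under the reader lock */
      g_assert_true (again == obj);
      g_object_unref (again);

      g_object_unref (obj);             /* drop the last strong reference */
      g_assert_null (g_weak_ref_get (&weak));

      g_weak_ref_clear (&weak);
      return 0;
    }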
|
@ -719,6 +719,224 @@ test_weak_ref_on_toggle_notify (void)
|
|||||||
g_assert_null (g_weak_ref_get (&weak));
|
g_assert_null (g_weak_ref_get (&weak));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
static void
|
||||||
|
weak_ref_in_toggle_notify_toggle_cb (gpointer data,
|
||||||
|
GObject *object,
|
||||||
|
gboolean is_last_ref)
|
||||||
|
{
|
||||||
|
GWeakRef weak2;
|
||||||
|
GObject *obj2;
|
||||||
|
|
||||||
|
if (is_last_ref)
|
||||||
|
return;
|
||||||
|
|
||||||
|
/* We just got a second ref, while calling g_weak_ref_get().
|
||||||
|
*
|
||||||
|
* Test that taking another weak ref in this situation works.
|
||||||
|
*/
|
||||||
|
|
||||||
|
g_weak_ref_init (&weak2, object);
|
||||||
|
g_assert_true (object == g_weak_ref_get (&weak2));
|
||||||
|
g_object_unref (object);
|
||||||
|
|
||||||
|
obj2 = g_object_new (G_TYPE_OBJECT, NULL);
|
||||||
|
g_weak_ref_set (&weak2, obj2);
|
||||||
|
g_object_unref (obj2);
|
||||||
|
|
||||||
|
g_assert_null (g_weak_ref_get (&weak2));
|
||||||
|
}
|
||||||
|
|
||||||
|
static void
|
||||||
|
test_weak_ref_in_toggle_notify (void)
|
||||||
|
{
|
||||||
|
GObject *obj;
|
||||||
|
GWeakRef weak = { { GUINT_TO_POINTER (0xDEADBEEFU) } };
|
||||||
|
|
||||||
|
obj = g_object_new (G_TYPE_OBJECT, NULL);
|
||||||
|
g_object_add_toggle_ref (obj, weak_ref_in_toggle_notify_toggle_cb, NULL);
|
||||||
|
g_object_unref (obj);
|
||||||
|
|
||||||
|
g_weak_ref_init (&weak, obj);
|
||||||
|
|
||||||
|
/* We trigger a toggle notify via g_weak_ref_get(). */
|
||||||
|
g_assert_true (g_weak_ref_get (&weak) == obj);
|
||||||
|
|
||||||
|
g_object_remove_toggle_ref (obj, weak_ref_in_toggle_notify_toggle_cb, NULL);
|
||||||
|
g_object_unref (obj);
|
||||||
|
|
||||||
|
g_assert_null (g_weak_ref_get (&weak));
|
||||||
|
}
|
||||||
|
|
||||||
|
/*****************************************************************************/
|
||||||
|
|
||||||
|
#define CONCURRENT_N_OBJS 5
|
||||||
|
#define CONCURRENT_N_THREADS 5
|
||||||
|
#define CONCURRENT_N_RACES 100
|
||||||
|
|
||||||
|
typedef struct
|
||||||
|
{
|
||||||
|
int TEST_IDX;
|
||||||
|
GObject *objs[CONCURRENT_N_OBJS];
|
||||||
|
int thread_done[CONCURRENT_N_THREADS];
|
||||||
|
} ConcurrentData;
|
||||||
|
|
||||||
|
typedef struct
|
||||||
|
{
|
||||||
|
const ConcurrentData *data;
|
||||||
|
int idx;
|
||||||
|
int race_count;
|
||||||
|
GWeakRef *weak_ref;
|
||||||
|
GRand *rnd;
|
||||||
|
} ConcurrentThreadData;
|
||||||
|
|
||||||
|
static gpointer
|
||||||
|
_test_weak_ref_concurrent_thread_cb (gpointer data)
|
||||||
|
{
|
||||||
|
ConcurrentThreadData *thread_data = data;
|
||||||
|
|
||||||
|
while (TRUE)
|
||||||
|
{
|
||||||
|
gboolean all_done;
|
||||||
|
int i;
|
||||||
|
int r;
|
||||||
|
|
||||||
|
for (r = 0; r < 15; r++)
|
||||||
|
{
|
||||||
|
GObject *obj_allocated = NULL;
|
||||||
|
GObject *obj;
|
||||||
|
GObject *obj2;
|
||||||
|
gboolean got_race;
|
||||||
|
|
||||||
|
/* Choose a random object */
|
||||||
|
obj = thread_data->data->objs[g_rand_int (thread_data->rnd) % CONCURRENT_N_OBJS];
|
||||||
|
if (thread_data->data->TEST_IDX > 0 && (g_rand_int (thread_data->rnd) % 4 == 0))
|
||||||
|
{
|
||||||
|
/* With TEST_IDX>0 also randomly choose NULL or a newly created
|
||||||
|
* object. */
|
||||||
|
if (g_rand_boolean (thread_data->rnd))
|
||||||
|
obj = NULL;
|
||||||
|
else
|
||||||
|
{
|
||||||
|
obj_allocated = g_object_new (G_TYPE_OBJECT, NULL);
|
||||||
|
obj = obj_allocated;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
g_assert (!obj || G_IS_OBJECT (obj));
|
||||||
|
|
||||||
|
g_weak_ref_set (thread_data->weak_ref, obj);
|
||||||
|
|
||||||
|
/* get the weak-ref. If there is no race, we expect to get the same
|
||||||
|
* object back. */
|
||||||
|
obj2 = g_weak_ref_get (thread_data->weak_ref);
|
||||||
|
|
||||||
|
g_assert (!obj2 || G_IS_OBJECT (obj2));
|
||||||
|
if (!obj2)
|
||||||
|
{
|
||||||
|
g_assert (thread_data->data->TEST_IDX > 0);
|
||||||
|
}
|
||||||
|
if (obj != obj2)
|
||||||
|
{
|
||||||
|
int cnt;
|
||||||
|
|
||||||
|
cnt = 0;
|
||||||
|
for (i = 0; i < CONCURRENT_N_OBJS; i++)
|
||||||
|
{
|
||||||
|
if (obj2 == thread_data->data->objs[i])
|
||||||
|
cnt++;
|
||||||
|
}
|
||||||
|
if (!obj2)
|
||||||
|
g_assert_cmpint (cnt, ==, 0);
|
||||||
|
else if (obj2 && obj2 == obj_allocated)
|
||||||
|
g_assert_cmpint (cnt, ==, 0);
|
||||||
|
else if (thread_data->data->TEST_IDX > 0)
|
||||||
|
g_assert_cmpint (cnt, <=, 1);
|
||||||
|
else
|
||||||
|
g_assert_cmpint (cnt, ==, 1);
|
||||||
|
got_race = TRUE;
|
||||||
|
}
|
||||||
|
else
|
||||||
|
got_race = FALSE;
|
||||||
|
|
||||||
|
g_clear_object (&obj2);
|
||||||
|
g_clear_object (&obj_allocated);
|
||||||
|
|
||||||
|
if (got_race)
|
||||||
|
{
|
||||||
|
/* Each thread should see CONCURRENT_N_RACES before being done.
|
||||||
|
* Count them. */
|
||||||
|
if (g_atomic_int_get (&thread_data->race_count) > CONCURRENT_N_RACES)
|
||||||
|
g_atomic_int_set (&thread_data->data->thread_done[thread_data->idx], 1);
|
||||||
|
else
|
||||||
|
g_atomic_int_add (&thread_data->race_count, 1);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Each thread runs, until all threads saw the expected number of races. */
|
||||||
|
all_done = TRUE;
|
||||||
|
for (i = 0; i < CONCURRENT_N_THREADS; i++)
|
||||||
|
{
|
||||||
|
if (!g_atomic_int_get (&thread_data->data->thread_done[i]))
|
||||||
|
{
|
||||||
|
all_done = FALSE;
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (all_done)
|
||||||
|
return GINT_TO_POINTER (1);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
static void
|
||||||
|
test_weak_ref_concurrent (gconstpointer testdata)
|
||||||
|
{
|
||||||
|
const int TEST_IDX = GPOINTER_TO_INT (testdata);
|
||||||
|
GThread *threads[CONCURRENT_N_THREADS];
|
||||||
|
int i;
|
||||||
|
ConcurrentData data = {
|
||||||
|
.TEST_IDX = TEST_IDX,
|
||||||
|
};
|
||||||
|
ConcurrentThreadData thread_data[CONCURRENT_N_THREADS];
|
||||||
|
GWeakRef weak_ref = { 0 };
|
||||||
|
|
||||||
|
/* Let several threads call g_weak_ref_set() & g_weak_ref_get() in a loop. */
|
||||||
|
|
||||||
|
for (i = 0; i < CONCURRENT_N_OBJS; i++)
|
||||||
|
data.objs[i] = g_object_new (G_TYPE_OBJECT, NULL);
|
||||||
|
|
||||||
|
for (i = 0; i < CONCURRENT_N_THREADS; i++)
|
||||||
|
{
|
||||||
|
const guint32 rnd_seed[] = {
|
||||||
|
g_test_rand_int (),
|
||||||
|
g_test_rand_int (),
|
||||||
|
g_test_rand_int (),
|
||||||
|
};
|
||||||
|
|
||||||
|
thread_data[i] = (ConcurrentThreadData){
|
||||||
|
.idx = i,
|
||||||
|
.data = &data,
|
||||||
|
.weak_ref = &weak_ref,
|
||||||
|
.rnd = g_rand_new_with_seed_array (rnd_seed, G_N_ELEMENTS (rnd_seed)),
|
||||||
|
};
|
||||||
|
threads[i] = g_thread_new ("test-weak-ref-concurrent", _test_weak_ref_concurrent_thread_cb, &thread_data[i]);
|
||||||
|
}
|
||||||
|
|
||||||
|
for (i = 0; i < CONCURRENT_N_THREADS; i++)
|
||||||
|
{
|
||||||
|
gpointer r;
|
||||||
|
|
||||||
|
r = g_thread_join (g_steal_pointer (&threads[i]));
|
||||||
|
g_assert_cmpint (GPOINTER_TO_INT (r), ==, 1);
|
||||||
|
}
|
||||||
|
|
||||||
|
for (i = 0; i < CONCURRENT_N_OBJS; i++)
|
||||||
|
g_object_unref (g_steal_pointer (&data.objs[i]));
|
||||||
|
for (i = 0; i < CONCURRENT_N_THREADS; i++)
|
||||||
|
g_rand_free (g_steal_pointer (&thread_data[i].rnd));
|
||||||
|
}
|
||||||
|
|
||||||
|
/*****************************************************************************/
|
||||||
|
|
||||||
typedef struct
|
typedef struct
|
||||||
{
|
{
|
||||||
gboolean should_be_last;
|
gboolean should_be_last;
|
||||||
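Note: the new tests cover a toggle notification fired from inside g_weak_ref_get() and a multi-threaded g_weak_ref_set()/g_weak_ref_get() stress test. As a reminder of the toggle-ref contract they build on (stand-alone sketch, not part of the test file):

    /* The toggle callback reports transitions of the reference count
     * between "exactly one reference" and "more than one". */
    #include <glib-object.h>

    static void
    toggled (gpointer data, GObject *object, gboolean is_last_ref)
    {
      (*(int *) data)++;
    }

    int
    main (void)
    {
      GObject *obj = g_object_new (G_TYPE_OBJECT, NULL);
      int toggles = 0;

      g_object_add_toggle_ref (obj, toggled, &toggles);  /* ref count: 2 */
      g_object_unref (obj);                              /* 2 -> 1: toggled(TRUE)  */
      g_object_ref (obj);                                /* 1 -> 2: toggled(FALSE) */
      g_object_unref (obj);                              /* 2 -> 1: toggled(TRUE)  */

      g_assert_cmpint (toggles, ==, 3);

      g_object_remove_toggle_ref (obj, toggled, &toggles);  /* drops the last ref */
      return 0;
    }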
@@ -1365,6 +1583,9 @@ main (int argc, char **argv)
   g_test_add_func ("/object/weak-ref/on-dispose", test_weak_ref_on_dispose);
   g_test_add_func ("/object/weak-ref/on-run-dispose", test_weak_ref_on_run_dispose);
   g_test_add_func ("/object/weak-ref/on-toggle-notify", test_weak_ref_on_toggle_notify);
+  g_test_add_func ("/object/weak-ref/in-toggle-notify", test_weak_ref_in_toggle_notify);
+  g_test_add_data_func ("/object/weak-ref/concurrent/0", GINT_TO_POINTER (0), test_weak_ref_concurrent);
+  g_test_add_data_func ("/object/weak-ref/concurrent/1", GINT_TO_POINTER (1), test_weak_ref_concurrent);
   g_test_add_func ("/object/toggle-ref", test_toggle_ref);
   g_test_add_func ("/object/toggle-ref/ref-on-dispose", test_toggle_ref_on_dispose);
   g_test_add_func ("/object/toggle-ref/ref-and-notify-on-dispose", test_toggle_ref_and_notify_on_dispose);