/* Adapted from CVS version 1.10 of glibc's sysdeps/i386/i486/bits/atomic.h
*/
gint
-g_atomic_int_exchange_and_add (gint *atomic,
- gint val)
+g_atomic_int_exchange_and_add (volatile gint *atomic,
+ gint val)
{
gint result;
}
void
-g_atomic_int_add (gint *atomic,
- gint val)
+g_atomic_int_add (volatile gint *atomic,
+ gint val)
{
__asm__ __volatile__ ("lock; addl %1,%0"
: "=m" (*atomic)
}
gboolean
-g_atomic_int_compare_and_exchange (gint *atomic,
- gint oldval,
- gint newval)
+g_atomic_int_compare_and_exchange (volatile gint *atomic,
+ gint oldval,
+ gint newval)
{
gint result;
* arguments and calling the former function */
gboolean
-g_atomic_pointer_compare_and_exchange (gpointer *atomic,
- gpointer oldval,
- gpointer newval)
+g_atomic_pointer_compare_and_exchange (volatile gpointer *atomic,
+ gpointer oldval,
+ gpointer newval)
{
gpointer result;
# if GLIB_SIZEOF_VOID_P == 4 /* 32-bit system */
gboolean
-g_atomic_pointer_compare_and_exchange (gpointer *atomic,
- gpointer oldval,
- gpointer newval)
+g_atomic_pointer_compare_and_exchange (volatile gpointer *atomic,
+ gpointer oldval,
+ gpointer newval)
{
gpointer result;
__asm__ __volatile__ ("cas [%4], %2, %0"
}
# elif GLIB_SIZEOF_VOID_P == 8 /* 64-bit system */
gboolean
-g_atomic_pointer_compare_and_exchange (gpointer *atomic,
- gpointer oldval,
- gpointer newval)
+g_atomic_pointer_compare_and_exchange (volatile gpointer *atomic,
+ gpointer oldval,
+ gpointer newval)
{
gpointer result;
gpointer *a = atomic;
})
# if GLIB_SIZEOF_VOID_P == 4 /* 32-bit system */
gboolean
-g_atomic_pointer_compare_and_exchange (gpointer *atomic,
- gpointer oldval,
- gpointer newval)
+g_atomic_pointer_compare_and_exchange (volatile gpointer *atomic,
+ gpointer oldval,
+ gpointer newval)
{
gint result;
gpointer prev;
}
# elif GLIB_SIZEOF_VOID_P == 8 /* 64-bit system */
gboolean
-g_atomic_pointer_compare_and_exchange (gpointer *atomic,
- gpointer oldval,
- gpointer newval)
+g_atomic_pointer_compare_and_exchange (volatile gpointer *atomic,
+ gpointer oldval,
+ gpointer newval)
{
gint result;
gpointer prev;
/* Adapted from CVS version 1.9 of glibc's sysdeps/x86_64/bits/atomic.h
*/
gint
-g_atomic_int_exchange_and_add (gint *atomic,
- gint val)
+g_atomic_int_exchange_and_add (volatile gint *atomic,
+ gint val)
{
gint result;
}
void
-g_atomic_int_add (gint *atomic,
- gint val)
+g_atomic_int_add (volatile gint *atomic,
+ gint val)
{
__asm__ __volatile__ ("lock; addl %1,%0"
: "=m" (*atomic)
}
gboolean
-g_atomic_int_compare_and_exchange (gint *atomic,
- gint oldval,
- gint newval)
+g_atomic_int_compare_and_exchange (volatile gint *atomic,
+ gint oldval,
+ gint newval)
{
gint result;
}
gboolean
-g_atomic_pointer_compare_and_exchange (gpointer *atomic,
- gpointer oldval,
- gpointer newval)
+g_atomic_pointer_compare_and_exchange (volatile gpointer *atomic,
+ gpointer oldval,
+ gpointer newval)
{
gpointer result;
/* Non-optimizing compile bails on the following two asm statements
* for reasons unknown to the author */
gint
-g_atomic_int_exchange_and_add (gint *atomic,
- gint val)
+g_atomic_int_exchange_and_add (volatile gint *atomic,
+ gint val)
{
gint result, temp;
__asm__ __volatile__ ("1: lwarx %0,0,%3\n"
/* The same as above, to save a function call repeated here */
void
-g_atomic_int_add (gint *atomic,
- gint val)
+g_atomic_int_add (volatile gint *atomic,
+ gint val)
{
gint result, temp;
__asm__ __volatile__ ("1: lwarx %0,0,%3\n"
}
# else /* !__OPTIMIZE__ */
gint
-g_atomic_int_exchange_and_add (gint *atomic,
- gint val)
+g_atomic_int_exchange_and_add (volatile gint *atomic,
+ gint val)
{
gint result;
do
}
void
-g_atomic_int_add (gint *atomic,
- gint val)
+g_atomic_int_add (volatile gint *atomic,
+ gint val)
{
gint result;
do
# if GLIB_SIZEOF_VOID_P == 4 /* 32-bit system */
gboolean
-g_atomic_int_compare_and_exchange (gint *atomic,
- gint oldval,
- gint newval)
+g_atomic_int_compare_and_exchange (volatile gint *atomic,
+ gint oldval,
+ gint newval)
{
gint result;
__asm__ __volatile__ ("sync\n"
}
gboolean
-g_atomic_pointer_compare_and_exchange (gpointer *atomic,
- gpointer oldval,
- gpointer newval)
+g_atomic_pointer_compare_and_exchange (volatile gpointer *atomic,
+ gpointer oldval,
+ gpointer newval)
{
gpointer result;
__asm__ __volatile__ ("sync\n"
}
# elif GLIB_SIZEOF_VOID_P == 8 /* 64-bit system */
gboolean
-g_atomic_int_compare_and_exchange (gint *atomic,
- gint oldval,
- gint newval)
+g_atomic_int_compare_and_exchange (volatile gint *atomic,
+ gint oldval,
+ gint newval)
{
gpointer result;
__asm__ __volatile__ ("sync\n"
}
gboolean
-g_atomic_pointer_compare_and_exchange (gpointer *atomic,
- gpointer oldval,
- gpointer newval)
+g_atomic_pointer_compare_and_exchange (volatile gpointer *atomic,
+ gpointer oldval,
+ gpointer newval)
{
gpointer result;
__asm__ __volatile__ ("sync\n"
/* Adapted from CVS version 1.8 of glibc's sysdeps/ia64/bits/atomic.h
*/
gint
-g_atomic_int_exchange_and_add (gint *atomic,
- gint val)
+g_atomic_int_exchange_and_add (volatile gint *atomic,
+ gint val)
{
return __sync_fetch_and_add_si (atomic, val);
}
}
gboolean
-g_atomic_int_compare_and_exchange (gint *atomic,
- gint oldval,
- gint newval)
+g_atomic_int_compare_and_exchange (volatile gint *atomic,
+ gint oldval,
+ gint newval)
{
return __sync_bool_compare_and_swap_si (atomic, oldval, newval);
}
gboolean
-g_atomic_pointer_compare_and_exchange (gpointer *atomic,
- gpointer oldval,
- gpointer newval)
+g_atomic_pointer_compare_and_exchange (volatile gpointer *atomic,
+ gpointer oldval,
+ gpointer newval)
{
return __sync_bool_compare_and_swap_di ((long *)atomic,
(long)oldval, (long)newval);
# if GLIB_SIZEOF_VOID_P == 4 /* 32-bit system */
gboolean
-g_atomic_pointer_compare_and_exchange (gpointer *atomic,
- gpointer oldval,
- gpointer newval)
+g_atomic_pointer_compare_and_exchange (volatile gpointer *atomic,
+ gpointer oldval,
+ gpointer newval)
{
gpointer result = oldval;
__asm__ __volatile__ ("cs %0, %2, %1"
}
# elif GLIB_SIZEOF_VOID_P == 8 /* 64-bit system */
gboolean
-g_atomic_pointer_compare_and_exchange (gpointer *atomic,
- gpointer oldval,
- gpointer newval)
+g_atomic_pointer_compare_and_exchange (volatile gpointer *atomic,
+ gpointer oldval,
+ gpointer newval)
{
gpointer result = oldval;
gpointer *a = atomic;
#ifdef DEFINE_WITH_WIN32_INTERLOCKED
# include <windows.h>
gint32
-g_atomic_int_exchange_and_add (gint32 *atomic,
- gint32 val)
+g_atomic_int_exchange_and_add (volatile gint32 *atomic,
+ gint32 val)
{
return InterlockedExchangeAdd (atomic, val);
}
void
-g_atomic_int_add (gint32 *atomic,
- gint32 val)
+g_atomic_int_add (volatile gint32 *atomic,
+ gint32 val)
{
InterlockedExchangeAdd (atomic, val);
}
gboolean
-g_atomic_int_compare_and_exchange (gint32 *atomic,
- gint32 oldval,
- gint32 newval)
+g_atomic_int_compare_and_exchange (volatile gint32 *atomic,
+ gint32 oldval,
+ gint32 newval)
{
- return (guint32)InterlockedCompareExchange ((PVOID*)atomic,
- (PVOID)newval,
- (PVOID)oldval) == oldval;
+ return (guint32) InterlockedCompareExchange ((PVOID*)atomic,
+ (PVOID)newval,
+ (PVOID)oldval) == oldval;
}
gboolean
-g_atomic_pointer_compare_and_exchange (gpointer *atomic,
- gpointer oldval,
- gpointer newval)
+g_atomic_pointer_compare_and_exchange (volatile gpointer *atomic,
+ gpointer oldval,
+ gpointer newval)
{
# if GLIB_SIZEOF_VOID_P != 4 /* no 32-bit system */
# error "InterlockedCompareExchangePointer needed"
static GMutex *g_atomic_mutex;
gint
-g_atomic_int_exchange_and_add (gint *atomic,
- gint val)
+g_atomic_int_exchange_and_add (volatile gint *atomic,
+ gint val)
{
gint result;
void
-g_atomic_int_add (gint *atomic,
- gint val)
+g_atomic_int_add (volatile gint *atomic,
+ gint val)
{
g_mutex_lock (g_atomic_mutex);
*atomic += val;
}
gboolean
-g_atomic_int_compare_and_exchange (gint *atomic,
- gint oldval,
- gint newval)
+g_atomic_int_compare_and_exchange (volatile gint *atomic,
+ gint oldval,
+ gint newval)
{
gboolean result;
}
gboolean
-g_atomic_pointer_compare_and_exchange (gpointer *atomic,
- gpointer oldval,
- gpointer newval)
+g_atomic_pointer_compare_and_exchange (volatile gpointer *atomic,
+ gpointer oldval,
+ gpointer newval)
{
gboolean result;
#ifdef G_ATOMIC_OP_MEMORY_BARRIER_NEEDED
gint
-g_atomic_int_get (gint *atomic)
+g_atomic_int_get (volatile gint *atomic)
{
gint result;
}
gpointer
-g_atomic_pointer_get (gpointer *atomic)
+g_atomic_pointer_get (volatile gpointer *atomic)
{
gpointer result;
#endif /* G_ATOMIC_OP_MEMORY_BARRIER_NEEDED */
#elif defined (G_ATOMIC_OP_MEMORY_BARRIER_NEEDED)
gint
-g_atomic_int_get (gint *atomic)
+g_atomic_int_get (volatile gint *atomic)
{
gint result = *atomic;
}
gpointer
-g_atomic_pointer_get (gpointer *atomic)
+g_atomic_pointer_get (volatile gpointer *atomic)
{
gpointer result = *atomic;
#ifdef ATOMIC_INT_CMP_XCHG
gboolean
-g_atomic_int_compare_and_exchange (gint *atomic,
- gint oldval,
- gint newval)
+g_atomic_int_compare_and_exchange (volatile gint *atomic,
+ gint oldval,
+ gint newval)
{
return ATOMIC_INT_CMP_XCHG (atomic, oldval, newval);
}
gint
-g_atomic_int_exchange_and_add (gint *atomic,
- gint val)
+g_atomic_int_exchange_and_add (volatile gint *atomic,
+ gint val)
{
gint result;
do
}
void
-g_atomic_int_add (gint *atomic,
- gint val)
+g_atomic_int_add (volatile gint *atomic,
+ gint val)
{
gint result;
do
#ifndef G_ATOMIC_OP_MEMORY_BARRIER_NEEDED
gint
-(g_atomic_int_get) (gint *atomic)
+(g_atomic_int_get) (volatile gint *atomic)
{
return g_atomic_int_get (atomic);
}
gpointer
-(g_atomic_pointer_get) (gpointer *atomic)
+(g_atomic_pointer_get) (volatile gpointer *atomic)
{
return g_atomic_pointer_get (atomic);
}
GHashNode **nodes;
GHashFunc hash_func;
GEqualFunc key_equal_func;
+ volatile guint ref_count;
GDestroyNotify key_destroy_func;
GDestroyNotify value_destroy_func;
};
* directly in a similar fashion to g_direct_equal(), but without the
* overhead of a function call.
*
- * Creates a new #GHashTable.
+ * Creates a new #GHashTable with a reference count of 1.
*
* Return value: a new #GHashTable.
**/
* value used when removing the entry from the #GHashTable or %NULL if
* you don't want to supply such a function.
*
- * Creates a new #GHashTable like g_hash_table_new() and allows to specify
- * functions to free the memory allocated for the key and value that get
- * called when removing the entry from the #GHashTable.
+ * Creates a new #GHashTable like g_hash_table_new() with a reference count
+ * of 1 and allows you to specify functions to free the memory allocated for the
+ * key and value that get called when removing the entry from the #GHashTable.
*
* Return value: a new #GHashTable.
**/
GDestroyNotify value_destroy_func)
{
GHashTable *hash_table;
- guint i;
hash_table = g_slice_new (GHashTable);
hash_table->size = HASH_TABLE_MIN_SIZE;
hash_table->nnodes = 0;
hash_table->hash_func = hash_func ? hash_func : g_direct_hash;
hash_table->key_equal_func = key_equal_func;
+ hash_table->ref_count = 1;
hash_table->key_destroy_func = key_destroy_func;
hash_table->value_destroy_func = value_destroy_func;
- hash_table->nodes = g_new (GHashNode*, hash_table->size);
-
- for (i = 0; i < hash_table->size; i++)
- hash_table->nodes[i] = NULL;
+ hash_table->nodes = g_new0 (GHashNode*, hash_table->size);
return hash_table;
}
+
+/**
+ * g_hash_table_ref:
+ * @hash_table: a valid #GHashTable.
+ *
+ * Atomically increments the reference count of @hash_table by one.
+ * This function is MT-safe and may be called from any thread.
+ *
+ * Return value: the passed in #GHashTable.
+ **/
+GHashTable*
+g_hash_table_ref (GHashTable *hash_table)
+{
+ g_return_val_if_fail (hash_table != NULL, NULL);
+ g_return_val_if_fail (hash_table->ref_count > 0, hash_table);
+
+ g_atomic_int_add (&hash_table->ref_count, 1);
+ return hash_table;
+}
+
+/**
+ * g_hash_table_unref:
+ * @hash_table: a valid #GHashTable.
+ *
+ * Atomically decrements the reference count of @hash_table by one.
+ * If the reference count drops to 0, all keys and values will be
+ * destroyed, and all memory allocated by the hash table is released.
+ * This function is MT-safe and may be called from any thread.
+ **/
+void
+g_hash_table_unref (GHashTable *hash_table)
+{
+ g_return_if_fail (hash_table != NULL);
+ g_return_if_fail (hash_table->ref_count > 0);
+
+ if (g_atomic_int_exchange_and_add (&hash_table->ref_count, -1) - 1 == 0)
+ {
+ guint i;
+ for (i = 0; i < hash_table->size; i++)
+ g_hash_nodes_destroy (hash_table->nodes[i],
+ hash_table->key_destroy_func,
+ hash_table->value_destroy_func);
+ g_free (hash_table->nodes);
+ g_slice_free (GHashTable, hash_table);
+ }
+}
+
/**
* g_hash_table_destroy:
* @hash_table: a #GHashTable.
*
- * Destroys the #GHashTable. If keys and/or values are dynamically
- * allocated, you should either free them first or create the #GHashTable
- * using g_hash_table_new_full(). In the latter case the destroy functions
- * you supplied will be called on all keys and values before destroying
- * the #GHashTable.
+ * Destroys all keys and values in the #GHashTable and decrements its
+ * reference count by 1. If keys and/or values are dynamically allocated,
+ * you should either free them first or create the #GHashTable with destroy
+ * notifiers using g_hash_table_new_full(). In the latter case the destroy
+ * functions you supplied will be called on all keys and values during the
+ * destruction phase.
**/
void
g_hash_table_destroy (GHashTable *hash_table)
guint i;
g_return_if_fail (hash_table != NULL);
+ g_return_if_fail (hash_table->ref_count > 0);
for (i = 0; i < hash_table->size; i++)
- g_hash_nodes_destroy (hash_table->nodes[i],
- hash_table->key_destroy_func,
- hash_table->value_destroy_func);
-
- g_free (hash_table->nodes);
- g_slice_free (GHashTable, hash_table);
+ {
+ g_hash_nodes_destroy (hash_table->nodes[i],
+ hash_table->key_destroy_func,
+ hash_table->value_destroy_func);
+ hash_table->nodes[i] = NULL;
+ }
+ hash_table->nnodes = 0;
+ hash_table->size = HASH_TABLE_MIN_SIZE;
+
+ g_hash_table_unref (hash_table);
}
static inline GHashNode**
GHashNode **node;
g_return_if_fail (hash_table != NULL);
+ g_return_if_fail (hash_table->ref_count > 0);
node = g_hash_table_lookup_node (hash_table, key);
GHashNode **node;
g_return_if_fail (hash_table != NULL);
+ g_return_if_fail (hash_table->ref_count > 0);
node = g_hash_table_lookup_node (hash_table, key);
}
}
+
#define __G_HASH_C__
#include "galiasdef.c"