From: Tim Janik
Date: Tue, 22 Nov 2005 13:16:58 +0000 (+0000)
Subject: g_hash_table_new_full(): create hash tables with a ref count of 1.
X-Git-Tag: GLIB_2_9_1~71
X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=3e847a090cfd8495add631d43388c461b1a85716;p=platform%2Fupstream%2Fglib.git

g_hash_table_new_full(): create hash tables with a ref count of 1.

Tue Nov 22 14:04:26 2005  Tim Janik

        * glib/ghash.h:
        * glib/ghash.c:
        g_hash_table_new_full(): create hash tables with a ref count of 1.
        g_hash_table_ref(): atomically ref_count+=1
        g_hash_table_unref(): atomically ref_count-=1, destroys hash table
        when refcount reaches 0.
        g_hash_table_destroy(): just destroy keys and values, unref by 1.
        g_hash_table_insert():
        g_hash_table_replace(): assert ref_count>0.

        * glib/gatomic.h:
        * glib/gatomic.c: added 'volatile' qualifier to all atomic pointer and
        integer pointers.
---
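The message above describes the new GHashTable reference-counting API. As an
illustration only (this sketch is not part of the patch; the key/value data
and variable names are made up), typical usage after this commit might look
like:

    #include <glib.h>

    static GHashTable *
    build_table (void)
    {
      /* g_hash_table_new_full() now returns a table with ref_count == 1;
       * the caller owns that initial reference. */
      GHashTable *table = g_hash_table_new_full (g_str_hash, g_str_equal,
                                                 g_free, g_free);
      g_hash_table_insert (table, g_strdup ("answer"), g_strdup ("42"));
      return table;
    }

    int
    main (void)
    {
      GHashTable *table = build_table ();
      GHashTable *extra = g_hash_table_ref (table);   /* ref_count: 1 -> 2 */

      g_hash_table_unref (table);   /* ref_count: 2 -> 1, table stays alive */
      g_hash_table_unref (extra);   /* ref_count: 1 -> 0, keys/values freed */
      return 0;
    }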
diff --git a/ChangeLog b/ChangeLog
index 7ffd216..9c46313 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -1,3 +1,19 @@
+Tue Nov 22 14:04:26 2005  Tim Janik
+
+        * glib/ghash.h:
+        * glib/ghash.c:
+        g_hash_table_new_full(): create hash tables with a ref count of 1.
+        g_hash_table_ref(): atomically ref_count+=1
+        g_hash_table_unref(): atomically ref_count-=1, destroys hash table
+        when refcount reaches 0.
+        g_hash_table_destroy(): just destroy keys and values, unref by 1.
+        g_hash_table_insert():
+        g_hash_table_replace(): assert ref_count>0.
+
+        * glib/gatomic.h:
+        * glib/gatomic.c: added 'volatile' qualifier to all atomic pointer and
+        integer pointers.
+
 2005-11-20  Behdad Esfahbod
 
         * glib/guniprop.c (g_unichar_get_mirror_char): Remove unused
diff --git a/ChangeLog.pre-2-10 b/ChangeLog.pre-2-10
index 7ffd216..9c46313 100644
--- a/ChangeLog.pre-2-10
+++ b/ChangeLog.pre-2-10
@@ -1,3 +1,19 @@
+Tue Nov 22 14:04:26 2005  Tim Janik
+
+        * glib/ghash.h:
+        * glib/ghash.c:
+        g_hash_table_new_full(): create hash tables with a ref count of 1.
+        g_hash_table_ref(): atomically ref_count+=1
+        g_hash_table_unref(): atomically ref_count-=1, destroys hash table
+        when refcount reaches 0.
+        g_hash_table_destroy(): just destroy keys and values, unref by 1.
+        g_hash_table_insert():
+        g_hash_table_replace(): assert ref_count>0.
+
+        * glib/gatomic.h:
+        * glib/gatomic.c: added 'volatile' qualifier to all atomic pointer and
+        integer pointers.
+
 2005-11-20  Behdad Esfahbod
 
         * glib/guniprop.c (g_unichar_get_mirror_char): Remove unused
diff --git a/ChangeLog.pre-2-12 b/ChangeLog.pre-2-12
index 7ffd216..9c46313 100644
--- a/ChangeLog.pre-2-12
+++ b/ChangeLog.pre-2-12
@@ -1,3 +1,19 @@
+Tue Nov 22 14:04:26 2005  Tim Janik
+
+        * glib/ghash.h:
+        * glib/ghash.c:
+        g_hash_table_new_full(): create hash tables with a ref count of 1.
+        g_hash_table_ref(): atomically ref_count+=1
+        g_hash_table_unref(): atomically ref_count-=1, destroys hash table
+        when refcount reaches 0.
+        g_hash_table_destroy(): just destroy keys and values, unref by 1.
+        g_hash_table_insert():
+        g_hash_table_replace(): assert ref_count>0.
+
+        * glib/gatomic.h:
+        * glib/gatomic.c: added 'volatile' qualifier to all atomic pointer and
+        integer pointers.
+
 2005-11-20  Behdad Esfahbod
 
         * glib/guniprop.c (g_unichar_get_mirror_char): Remove unused
diff --git a/glib/gatomic.c b/glib/gatomic.c
index eb9bec3..afc5329 100644
--- a/glib/gatomic.c
+++ b/glib/gatomic.c
@@ -31,8 +31,8 @@
 /* Adapted from CVS version 1.10 of glibc's sysdeps/i386/i486/bits/atomic.h */
 gint
-g_atomic_int_exchange_and_add (gint *atomic,
-                               gint  val)
+g_atomic_int_exchange_and_add (volatile gint *atomic,
+                               gint           val)
 {
   gint result;
@@ -43,8 +43,8 @@ g_atomic_int_exchange_and_add (gint *atomic,
 }
 
 void
-g_atomic_int_add (gint *atomic,
-                  gint  val)
+g_atomic_int_add (volatile gint *atomic,
+                  gint           val)
 {
   __asm__ __volatile__ ("lock; addl %1,%0"
                         : "=m" (*atomic)
@@ -52,9 +52,9 @@ g_atomic_int_add (gint *atomic,
 }
 
 gboolean
-g_atomic_int_compare_and_exchange (gint *atomic,
-                                   gint  oldval,
-                                   gint  newval)
+g_atomic_int_compare_and_exchange (volatile gint *atomic,
+                                   gint           oldval,
+                                   gint           newval)
 {
   gint result;
@@ -70,9 +70,9 @@ g_atomic_int_compare_and_exchange (gint *atomic,
  * arguments and calling the former function */
 gboolean
-g_atomic_pointer_compare_and_exchange (gpointer *atomic,
-                                       gpointer  oldval,
-                                       gpointer  newval)
+g_atomic_pointer_compare_and_exchange (volatile gpointer *atomic,
+                                       gpointer           oldval,
+                                       gpointer           newval)
 {
   gpointer result;
@@ -98,9 +98,9 @@ g_atomic_pointer_compare_and_exchange (gpointer *atomic,
 # if GLIB_SIZEOF_VOID_P == 4 /* 32-bit system */
 gboolean
-g_atomic_pointer_compare_and_exchange (gpointer *atomic,
-                                       gpointer  oldval,
-                                       gpointer  newval)
+g_atomic_pointer_compare_and_exchange (volatile gpointer *atomic,
+                                       gpointer           oldval,
+                                       gpointer           newval)
 {
   gpointer result;
   __asm__ __volatile__ ("cas [%4], %2, %0"
@@ -111,9 +111,9 @@ g_atomic_pointer_compare_and_exchange (gpointer *atomic,
 }
 # elif GLIB_SIZEOF_VOID_P == 8 /* 64-bit system */
 gboolean
-g_atomic_pointer_compare_and_exchange (gpointer *atomic,
-                                       gpointer  oldval,
-                                       gpointer  newval)
+g_atomic_pointer_compare_and_exchange (volatile gpointer *atomic,
+                                       gpointer           oldval,
+                                       gpointer           newval)
 {
   gpointer result;
   gpointer *a = atomic;
@@ -157,9 +157,9 @@ g_atomic_pointer_compare_and_exchange (gpointer *atomic,
     })
 # if GLIB_SIZEOF_VOID_P == 4 /* 32-bit system */
 gboolean
-g_atomic_pointer_compare_and_exchange (gpointer *atomic,
-                                       gpointer  oldval,
-                                       gpointer  newval)
+g_atomic_pointer_compare_and_exchange (volatile gpointer *atomic,
+                                       gpointer           oldval,
+                                       gpointer           newval)
 {
   gint result;
   gpointer prev;
@@ -183,9 +183,9 @@ g_atomic_pointer_compare_and_exchange (gpointer *atomic,
 }
 # elif GLIB_SIZEOF_VOID_P == 8 /* 64-bit system */
 gboolean
-g_atomic_pointer_compare_and_exchange (gpointer *atomic,
-                                       gpointer  oldval,
-                                       gpointer  newval)
+g_atomic_pointer_compare_and_exchange (volatile gpointer *atomic,
+                                       gpointer           oldval,
+                                       gpointer           newval)
 {
   gint result;
   gpointer prev;
@@ -215,8 +215,8 @@ g_atomic_pointer_compare_and_exchange (gpointer *atomic,
 /* Adapted from CVS version 1.9 of glibc's sysdeps/x86_64/bits/atomic.h */
 gint
-g_atomic_int_exchange_and_add (gint *atomic,
-                               gint  val)
+g_atomic_int_exchange_and_add (volatile gint *atomic,
+                               gint           val)
 {
   gint result;
@@ -227,8 +227,8 @@ g_atomic_int_exchange_and_add (gint *atomic,
 }
 
 void
-g_atomic_int_add (gint *atomic,
-                  gint  val)
+g_atomic_int_add (volatile gint *atomic,
+                  gint           val)
 {
   __asm__ __volatile__ ("lock; addl %1,%0"
                         : "=m" (*atomic)
@@ -236,9 +236,9 @@ g_atomic_int_add (gint *atomic,
 }
 
 gboolean
-g_atomic_int_compare_and_exchange (gint *atomic,
-                                   gint  oldval,
-                                   gint  newval)
+g_atomic_int_compare_and_exchange (volatile gint *atomic,
+                                   gint           oldval,
+                                   gint           newval)
 {
   gint result;
@@ -250,9 +250,9 @@ g_atomic_int_compare_and_exchange (gint *atomic,
 }
 
 gboolean
-g_atomic_pointer_compare_and_exchange (gpointer *atomic,
-                                       gpointer  oldval,
-                                       gpointer  newval)
+g_atomic_pointer_compare_and_exchange (volatile gpointer *atomic,
+                                       gpointer           oldval,
+                                       gpointer           newval)
 {
   gpointer result;
@@ -272,8 +272,8 @@ g_atomic_pointer_compare_and_exchange (gpointer *atomic,
 /* Non-optimizing compile bails on the following two asm statements
  * for reasons unknown to the author */
 gint
-g_atomic_int_exchange_and_add (gint *atomic,
-                               gint  val)
+g_atomic_int_exchange_and_add (volatile gint *atomic,
+                               gint           val)
 {
   gint result, temp;
   __asm__ __volatile__ ("1: lwarx %0,0,%3\n"
@@ -288,8 +288,8 @@ g_atomic_int_exchange_and_add (gint *atomic,
 /* The same as above, to save a function call repeated here */
 void
-g_atomic_int_add (gint *atomic,
-                  gint  val)
+g_atomic_int_add (volatile gint *atomic,
+                  gint           val)
 {
   gint result, temp;
   __asm__ __volatile__ ("1: lwarx %0,0,%3\n"
@@ -302,8 +302,8 @@ g_atomic_int_add (gint *atomic,
 }
 # else /* !__OPTIMIZE__ */
 gint
-g_atomic_int_exchange_and_add (gint *atomic,
-                               gint  val)
+g_atomic_int_exchange_and_add (volatile gint *atomic,
+                               gint           val)
 {
   gint result;
   do
@@ -314,8 +314,8 @@ g_atomic_int_exchange_and_add (gint *atomic,
 }
 
 void
-g_atomic_int_add (gint *atomic,
-                  gint  val)
+g_atomic_int_add (volatile gint *atomic,
+                  gint           val)
 {
   gint result;
   do
@@ -326,9 +326,9 @@
 # if GLIB_SIZEOF_VOID_P == 4 /* 32-bit system */
 gboolean
-g_atomic_int_compare_and_exchange (gint *atomic,
-                                   gint  oldval,
-                                   gint  newval)
+g_atomic_int_compare_and_exchange (volatile gint *atomic,
+                                   gint           oldval,
+                                   gint           newval)
 {
   gint result;
   __asm__ __volatile__ ("sync\n"
@@ -345,9 +345,9 @@ g_atomic_int_compare_and_exchange (gint *atomic,
 }
 
 gboolean
-g_atomic_pointer_compare_and_exchange (gpointer *atomic,
-                                       gpointer  oldval,
-                                       gpointer  newval)
+g_atomic_pointer_compare_and_exchange (volatile gpointer *atomic,
+                                       gpointer           oldval,
+                                       gpointer           newval)
 {
   gpointer result;
   __asm__ __volatile__ ("sync\n"
@@ -364,9 +364,9 @@ g_atomic_pointer_compare_and_exchange (gpointer *atomic,
 }
 # elif GLIB_SIZEOF_VOID_P == 8 /* 64-bit system */
 gboolean
-g_atomic_int_compare_and_exchange (gint *atomic,
-                                   gint  oldval,
-                                   gint  newval)
+g_atomic_int_compare_and_exchange (volatile gint *atomic,
+                                   gint           oldval,
+                                   gint           newval)
 {
   gpointer result;
   __asm__ __volatile__ ("sync\n"
@@ -384,9 +384,9 @@ g_atomic_int_compare_and_exchange (gint *atomic,
 }
 
 gboolean
-g_atomic_pointer_compare_and_exchange (gpointer *atomic,
-                                       gpointer  oldval,
-                                       gpointer  newval)
+g_atomic_pointer_compare_and_exchange (volatile gpointer *atomic,
+                                       gpointer           oldval,
+                                       gpointer           newval)
 {
   gpointer result;
   __asm__ __volatile__ ("sync\n"
@@ -411,8 +411,8 @@ g_atomic_pointer_compare_and_exchange (gpointer *atomic,
 /* Adapted from CVS version 1.8 of glibc's sysdeps/ia64/bits/atomic.h */
 gint
-g_atomic_int_exchange_and_add (gint *atomic,
-                               gint  val)
+g_atomic_int_exchange_and_add (volatile gint *atomic,
+                               gint           val)
 {
   return __sync_fetch_and_add_si (atomic, val);
 }
@@ -425,17 +425,17 @@ g_atomic_int_add (gint *atomic,
 }
 
 gboolean
-g_atomic_int_compare_and_exchange (gint *atomic,
-                                   gint  oldval,
-                                   gint  newval)
+g_atomic_int_compare_and_exchange (volatile gint *atomic,
+                                   gint           oldval,
+                                   gint           newval)
 {
   return __sync_bool_compare_and_swap_si (atomic, oldval, newval);
 }
 
 gboolean
-g_atomic_pointer_compare_and_exchange (gpointer *atomic,
-                                       gpointer  oldval,
-                                       gpointer  newval)
+g_atomic_pointer_compare_and_exchange (volatile gpointer *atomic,
+                                       gpointer           oldval,
+                                       gpointer           newval)
 {
   return __sync_bool_compare_and_swap_di ((long *)atomic,
                                           (long)oldval,
                                           (long)newval);
@@ -456,9 +456,9 @@ g_atomic_pointer_compare_and_exchange (gpointer *atomic,
 # if GLIB_SIZEOF_VOID_P == 4 /* 32-bit system */
 gboolean
-g_atomic_pointer_compare_and_exchange (gpointer *atomic,
-                                       gpointer  oldval,
-                                       gpointer  newval)
+g_atomic_pointer_compare_and_exchange (volatile gpointer *atomic,
+                                       gpointer           oldval,
+                                       gpointer           newval)
 {
   gpointer result = oldval;
   __asm__ __volatile__ ("cs %0, %2, %1"
@@ -468,9 +468,9 @@ g_atomic_pointer_compare_and_exchange (gpointer *atomic,
 }
 # elif GLIB_SIZEOF_VOID_P == 8 /* 64-bit system */
 gboolean
-g_atomic_pointer_compare_and_exchange (gpointer *atomic,
-                                       gpointer  oldval,
-                                       gpointer  newval)
+g_atomic_pointer_compare_and_exchange (volatile gpointer *atomic,
+                                       gpointer           oldval,
+                                       gpointer           newval)
 {
   gpointer result = oldval;
   gpointer *a = atomic;
@@ -496,33 +496,33 @@ g_atomic_pointer_compare_and_exchange (gpointer *atomic,
 #ifdef DEFINE_WITH_WIN32_INTERLOCKED
 # include <windows.h>
 gint32
-g_atomic_int_exchange_and_add (gint32 *atomic,
-                               gint32  val)
+g_atomic_int_exchange_and_add (volatile gint32 *atomic,
+                               gint32           val)
 {
   return InterlockedExchangeAdd (atomic, val);
 }
 
 void
-g_atomic_int_add (gint32 *atomic,
-                  gint32  val)
+g_atomic_int_add (volatile gint32 *atomic,
+                  gint32           val)
 {
   InterlockedExchangeAdd (atomic, val);
 }
 
 gboolean
-g_atomic_int_compare_and_exchange (gint32 *atomic,
-                                   gint32  oldval,
-                                   gint32  newval)
+g_atomic_int_compare_and_exchange (volatile gint32 *atomic,
+                                   gint32           oldval,
+                                   gint32           newval)
 {
-  return (guint32)InterlockedCompareExchange ((PVOID*)atomic,
-                                              (PVOID)newval,
-                                              (PVOID)oldval) == oldval;
+  return (guint32) InterlockedCompareExchange ((PVOID*)atomic,
+                                               (PVOID)newval,
+                                               (PVOID)oldval) == oldval;
 }
 
 gboolean
-g_atomic_pointer_compare_and_exchange (gpointer *atomic,
-                                       gpointer  oldval,
-                                       gpointer  newval)
+g_atomic_pointer_compare_and_exchange (volatile gpointer *atomic,
+                                       gpointer           oldval,
+                                       gpointer           newval)
 {
 # if GLIB_SIZEOF_VOID_P != 4 /* no 32-bit system */
 # error "InterlockedCompareExchangePointer needed"
@@ -537,8 +537,8 @@ g_atomic_pointer_compare_and_exchange (gpointer *atomic,
 static GMutex *g_atomic_mutex;
 
 gint
-g_atomic_int_exchange_and_add (gint *atomic,
-                               gint  val)
+g_atomic_int_exchange_and_add (volatile gint *atomic,
+                               gint           val)
 {
   gint result;
@@ -552,8 +552,8 @@ g_atomic_int_exchange_and_add (gint *atomic,
 
 
 void
-g_atomic_int_add (gint *atomic,
-                  gint  val)
+g_atomic_int_add (volatile gint *atomic,
+                  gint           val)
 {
   g_mutex_lock (g_atomic_mutex);
   *atomic += val;
@@ -561,9 +561,9 @@ g_atomic_int_add (gint *atomic,
 }
 
 gboolean
-g_atomic_int_compare_and_exchange (gint *atomic,
-                                   gint  oldval,
-                                   gint  newval)
+g_atomic_int_compare_and_exchange (volatile gint *atomic,
+                                   gint           oldval,
+                                   gint           newval)
 {
   gboolean result;
@@ -581,9 +581,9 @@ g_atomic_int_compare_and_exchange (gint *atomic,
 }
 
 gboolean
-g_atomic_pointer_compare_and_exchange (gpointer *atomic,
-                                       gpointer  oldval,
-                                       gpointer  newval)
+g_atomic_pointer_compare_and_exchange (volatile gpointer *atomic,
+                                       gpointer           oldval,
+                                       gpointer           newval)
 {
   gboolean result;
@@ -602,7 +602,7 @@ g_atomic_pointer_compare_and_exchange (gpointer *atomic,
 #ifdef G_ATOMIC_OP_MEMORY_BARRIER_NEEDED
 gint
-g_atomic_int_get (gint *atomic)
+g_atomic_int_get (volatile gint *atomic)
 {
   gint result;
@@ -614,7 +614,7 @@ g_atomic_int_get (gint *atomic)
 }
 
 gpointer
-g_atomic_pointer_get (gpointer *atomic)
+g_atomic_pointer_get (volatile gpointer *atomic)
 {
   gpointer result;
@@ -627,7 +627,7 @@ g_atomic_pointer_get (gpointer *atomic)
 #endif /* G_ATOMIC_OP_MEMORY_BARRIER_NEEDED */
 #elif defined (G_ATOMIC_OP_MEMORY_BARRIER_NEEDED)
 gint
-g_atomic_int_get (gint *atomic)
+g_atomic_int_get (volatile gint *atomic)
 {
   gint result = *atomic;
@@ -637,7 +637,7 @@ g_atomic_int_get (gint *atomic)
 }
 
 gpointer
-g_atomic_pointer_get (gpointer *atomic)
+g_atomic_pointer_get (volatile gpointer *atomic)
 {
   gpointer result = *atomic;
@@ -649,16 +649,16 @@ g_atomic_pointer_get (gpointer *atomic)
 #ifdef ATOMIC_INT_CMP_XCHG
 gboolean
-g_atomic_int_compare_and_exchange (gint *atomic,
-                                   gint  oldval,
-                                   gint  newval)
+g_atomic_int_compare_and_exchange (volatile gint *atomic,
+                                   gint           oldval,
+                                   gint           newval)
 {
   return ATOMIC_INT_CMP_XCHG (atomic, oldval, newval);
 }
 
 gint
-g_atomic_int_exchange_and_add (gint *atomic,
-                               gint  val)
+g_atomic_int_exchange_and_add (volatile gint *atomic,
+                               gint           val)
 {
   gint result;
   do
@@ -669,8 +669,8 @@ g_atomic_int_exchange_and_add (gint *atomic,
 }
 
 void
-g_atomic_int_add (gint *atomic,
-                  gint  val)
+g_atomic_int_add (volatile gint *atomic,
+                  gint           val)
 {
   gint result;
   do
@@ -689,13 +689,13 @@ _g_atomic_thread_init (void)
 #ifndef G_ATOMIC_OP_MEMORY_BARRIER_NEEDED
 gint
-(g_atomic_int_get) (gint *atomic)
+(g_atomic_int_get) (volatile gint *atomic)
 {
   return g_atomic_int_get (atomic);
 }
 
 gpointer
-(g_atomic_pointer_get) (gpointer *atomic)
+(g_atomic_pointer_get) (volatile gpointer *atomic)
 {
   return g_atomic_pointer_get (atomic);
 }
diff --git a/glib/gatomic.h b/glib/gatomic.h
index 4e050a2..ab8c2d9 100644
--- a/glib/gatomic.h
+++ b/glib/gatomic.h
@@ -34,19 +34,19 @@
 
 G_BEGIN_DECLS
 
-gint     g_atomic_int_exchange_and_add         (gint     *atomic,
-                                                gint      val);
-void     g_atomic_int_add                      (gint     *atomic,
-                                                gint      val);
-gboolean g_atomic_int_compare_and_exchange     (gint     *atomic,
-                                                gint      oldval,
-                                                gint      newval);
-gboolean g_atomic_pointer_compare_and_exchange (gpointer *atomic,
-                                                gpointer  oldval,
-                                                gpointer  newval);
+gint     g_atomic_int_exchange_and_add         (volatile gint *atomic,
+                                                gint           val);
+void     g_atomic_int_add                      (volatile gint *atomic,
+                                                gint           val);
+gboolean g_atomic_int_compare_and_exchange     (volatile gint *atomic,
+                                                gint           oldval,
+                                                gint           newval);
+gboolean g_atomic_pointer_compare_and_exchange (volatile gpointer *atomic,
+                                                gpointer           oldval,
+                                                gpointer           newval);
 
-gint     g_atomic_int_get                      (gint     *atomic);
-gpointer g_atomic_pointer_get                  (gpointer *atomic);
+gint     g_atomic_int_get                      (volatile gint *atomic);
+gpointer g_atomic_pointer_get                  (volatile gpointer *atomic);
 
 #ifndef G_ATOMIC_OP_MEMORY_BARRIER_NEEDED
 # define g_atomic_int_get(atomic)       (*(atomic))
diff --git a/glib/ghash.c b/glib/ghash.c
index debb6c9..41ce0db 100644
--- a/glib/ghash.c
+++ b/glib/ghash.c
@@ -54,6 +54,7 @@ struct _GHashTable
   GHashNode      **nodes;
   GHashFunc        hash_func;
   GEqualFunc       key_equal_func;
+  volatile guint   ref_count;
   GDestroyNotify   key_destroy_func;
   GDestroyNotify   value_destroy_func;
 };
@@ -98,7 +99,7 @@ static guint g_hash_table_foreach_remove_or_steal (GHashTable *hash_table,
  * directly in a similar fashion to g_direct_equal(), but without the
  * overhead of a function call.
  *
- * Creates a new #GHashTable.
+ * Creates a new #GHashTable with a reference count of 1.
  *
  * Return value: a new #GHashTable.
  **/
@@ -121,9 +122,9 @@ g_hash_table_new (GHashFunc hash_func,
  * value used when removing the entry from the #GHashTable or %NULL if
  * you don't want to supply such a function.
  *
- * Creates a new #GHashTable like g_hash_table_new() and allows to specify
- * functions to free the memory allocated for the key and value that get
- * called when removing the entry from the #GHashTable.
+ * Creates a new #GHashTable like g_hash_table_new() with a reference count
+ * of 1 and allows to specify functions to free the memory allocated for the
+ * key and value that get called when removing the entry from the #GHashTable.
 *
 * Return value: a new #GHashTable.
 **/
@@ -134,32 +135,77 @@ g_hash_table_new_full (GHashFunc hash_func,
                        GDestroyNotify  value_destroy_func)
 {
   GHashTable *hash_table;
-  guint i;
 
   hash_table = g_slice_new (GHashTable);
   hash_table->size = HASH_TABLE_MIN_SIZE;
   hash_table->nnodes = 0;
   hash_table->hash_func = hash_func ? hash_func : g_direct_hash;
   hash_table->key_equal_func = key_equal_func;
+  hash_table->ref_count = 1;
   hash_table->key_destroy_func = key_destroy_func;
   hash_table->value_destroy_func = value_destroy_func;
-  hash_table->nodes = g_new (GHashNode*, hash_table->size);
-
-  for (i = 0; i < hash_table->size; i++)
-    hash_table->nodes[i] = NULL;
+  hash_table->nodes = g_new0 (GHashNode*, hash_table->size);
 
   return hash_table;
 }
 
+/**
+ * g_hash_table_ref:
+ * @hash_table: a valid #GHashTable.
+ *
+ * Atomically increments the reference count of @hash_table by one.
+ * This function is MT-safe and may be called from any thread.
+ *
+ * Return value: the passed in #GHashTable.
+ **/
+GHashTable*
+g_hash_table_ref (GHashTable *hash_table)
+{
+  g_return_val_if_fail (hash_table != NULL, NULL);
+  g_return_val_if_fail (hash_table->ref_count > 0, hash_table);
+
+  g_atomic_int_add (&hash_table->ref_count, 1);
+  return hash_table;
+}
+
+/**
+ * g_hash_table_unref:
+ * @hash_table: a valid #GHashTable.
+ *
+ * Atomically decrements the reference count of @hash_table by one.
+ * If the reference count drops to 0, all keys and values will be
+ * destroyed, and all memory allocated by the hash table is released.
+ * This function is MT-safe and may be called from any thread.
+ **/
+void
+g_hash_table_unref (GHashTable *hash_table)
+{
+  g_return_if_fail (hash_table != NULL);
+  g_return_if_fail (hash_table->ref_count > 0);
+
+  if (g_atomic_int_exchange_and_add (&hash_table->ref_count, -1) - 1 == 0)
+    {
+      guint i;
+      for (i = 0; i < hash_table->size; i++)
+        g_hash_nodes_destroy (hash_table->nodes[i],
+                              hash_table->key_destroy_func,
+                              hash_table->value_destroy_func);
+      g_free (hash_table->nodes);
+      g_slice_free (GHashTable, hash_table);
+    }
+}
+
 /**
  * g_hash_table_destroy:
  * @hash_table: a #GHashTable.
  *
- * Destroys the #GHashTable. If keys and/or values are dynamically
- * allocated, you should either free them first or create the #GHashTable
- * using g_hash_table_new_full(). In the latter case the destroy functions
- * you supplied will be called on all keys and values before destroying
- * the #GHashTable.
+ * Destroys all keys and values in the #GHashTable and decrements it's
+ * reference count by 1. If keys and/or values are dynamically allocated,
+ * you should either free them first or create the #GHashTable with destroy
+ * notifiers using g_hash_table_new_full(). In the latter case the destroy
+ * functions you supplied will be called on all keys and values during the
+ * destruction phase.
  **/
 void
 g_hash_table_destroy (GHashTable *hash_table)
@@ -167,14 +213,19 @@ g_hash_table_destroy (GHashTable *hash_table)
   guint i;
 
   g_return_if_fail (hash_table != NULL);
+  g_return_if_fail (hash_table->ref_count > 0);
 
   for (i = 0; i < hash_table->size; i++)
-    g_hash_nodes_destroy (hash_table->nodes[i],
-                          hash_table->key_destroy_func,
-                          hash_table->value_destroy_func);
-
-  g_free (hash_table->nodes);
-  g_slice_free (GHashTable, hash_table);
+    {
+      g_hash_nodes_destroy (hash_table->nodes[i],
+                            hash_table->key_destroy_func,
+                            hash_table->value_destroy_func);
+      hash_table->nodes[i] = NULL;
+    }
+  hash_table->nnodes = 0;
+  hash_table->size = HASH_TABLE_MIN_SIZE;
+
+  g_hash_table_unref (hash_table);
 }
 
 static inline GHashNode**
@@ -286,6 +337,7 @@ g_hash_table_insert (GHashTable *hash_table,
   GHashNode **node;
 
   g_return_if_fail (hash_table != NULL);
+  g_return_if_fail (hash_table->ref_count > 0);
 
   node = g_hash_table_lookup_node (hash_table, key);
@@ -334,6 +386,7 @@ g_hash_table_replace (GHashTable *hash_table,
   GHashNode **node;
 
   g_return_if_fail (hash_table != NULL);
+  g_return_if_fail (hash_table->ref_count > 0);
 
   node = g_hash_table_lookup_node (hash_table, key);
@@ -686,5 +739,6 @@ g_hash_nodes_destroy (GHashNode *hash_node,
     }
 }
 
+
 #define __G_HASH_C__
 #include "galiasdef.c"
diff --git a/glib/ghash.h b/glib/ghash.h
index 8e24342..6d301b2 100644
--- a/glib/ghash.h
+++ b/glib/ghash.h
@@ -65,7 +65,7 @@ gboolean    g_hash_table_lookup_extended  (GHashTable     *hash_table,
 void        g_hash_table_foreach          (GHashTable     *hash_table,
                                            GHFunc          func,
                                            gpointer        user_data);
-gpointer    g_hash_table_find            (GHashTable     *hash_table,
+gpointer    g_hash_table_find             (GHashTable     *hash_table,
                                            GHRFunc         predicate,
                                            gpointer        user_data);
 guint       g_hash_table_foreach_remove   (GHashTable     *hash_table,
@@ -76,6 +76,10 @@ guint       g_hash_table_foreach_steal    (GHashTable     *hash_table,
                                            gpointer        user_data);
 guint       g_hash_table_size             (GHashTable     *hash_table);
 
+/* keeping hash tables alive */
+GHashTable* g_hash_table_ref              (GHashTable     *hash_table);
+void        g_hash_table_unref            (GHashTable     *hash_table);
+
 #ifndef G_DISABLE_DEPRECATED
 
 /* The following two functions are deprecated and will be removed in
diff --git a/glib/glib.symbols b/glib/glib.symbols
index a920d03..1cab86f 100644
--- a/glib/glib.symbols
+++ b/glib/glib.symbols
@@ -287,6 +287,8 @@ g_mkstemp_utf8
 #if IN_HEADER(__G_HASH_H__)
 #if IN_FILE(__G_HASH_C__)
 g_hash_table_destroy
+g_hash_table_unref
+g_hash_table_ref
 g_hash_table_find
 g_hash_table_foreach
 g_hash_table_foreach_remove
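A note on the atomic primitives used above: g_atomic_int_exchange_and_add()
returns the counter's previous value, which is what lets g_hash_table_unref()
ensure that exactly one thread observes the 1 -> 0 transition and frees the
table. A minimal sketch of that pattern (illustrative only; MyObject and the
helper names are hypothetical, not part of GLib or of this patch):

    #include <glib.h>

    typedef struct {
      volatile gint ref_count;   /* volatile, like GHashTable's new field */
      gchar        *payload;
    } MyObject;

    static MyObject *
    my_object_new (const gchar *payload)
    {
      MyObject *obj = g_new0 (MyObject, 1);
      obj->ref_count = 1;                  /* born with one reference */
      obj->payload = g_strdup (payload);
      return obj;
    }

    static MyObject *
    my_object_ref (MyObject *obj)
    {
      g_atomic_int_add (&obj->ref_count, 1);
      return obj;
    }

    static void
    my_object_unref (MyObject *obj)
    {
      /* the old value is returned; only the caller that saw 1 may free */
      if (g_atomic_int_exchange_and_add (&obj->ref_count, -1) - 1 == 0)
        {
          g_free (obj->payload);
          g_free (obj);
        }
    }

The 'volatile' qualifiers added to the gatomic.h prototypes appear to serve
this kind of caller: ghash.c now declares its ref_count field volatile and
passes its address straight to g_atomic_int_add() and
g_atomic_int_exchange_and_add() without casts.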