Found and fixed a couple of bugs.
Found a couple of multithreading issues and marked them with "XXX-MT-bug".
#define hb_atomic_int_impl_set_relaxed(AI, V) __atomic_store_n ((AI), (V), __ATOMIC_RELAXED)
#define hb_atomic_int_impl_get_relaxed(AI) __atomic_load_n ((AI), __ATOMIC_RELAXED)
+#define hb_atomic_ptr_impl_set_relaxed(P, V) __atomic_store_n ((P), (V), __ATOMIC_RELAXED)
+#define hb_atomic_ptr_impl_get_relaxed(P) __atomic_load_n ((P), __ATOMIC_RELAXED)
#define hb_atomic_ptr_impl_get(P) __atomic_load_n ((P), __ATOMIC_CONSUME)
static inline bool
_hb_atomic_ptr_impl_cmplexch (const void **P, const void *O_, const void *N)
#define hb_atomic_int_impl_set_relaxed(AI, V) (reinterpret_cast<std::atomic<int> *> (AI)->store ((V), std::memory_order_relaxed))
#define hb_atomic_int_impl_get_relaxed(AI) (reinterpret_cast<std::atomic<int> *> (AI)->load (std::memory_order_relaxed))
+#define hb_atomic_ptr_impl_set_relaxed(P, V) (reinterpret_cast<std::atomic<void*> *> (P)->store ((V), std::memory_order_relaxed))
+#define hb_atomic_ptr_impl_get_relaxed(P) (reinterpret_cast<std::atomic<void*> *> (P)->load (std::memory_order_relaxed))
#define hb_atomic_ptr_impl_get(P) (reinterpret_cast<std::atomic<void*> *> (P)->load (std::memory_order_consume))
static inline bool
_hb_atomic_ptr_impl_cmplexch (const void **P, const void *O_, const void *N)
#ifndef hb_atomic_int_impl_get_relaxed
#define hb_atomic_int_impl_get_relaxed(AI) (*(AI))
#endif
+
+#ifndef hb_atomic_ptr_impl_set_relaxed
+#define hb_atomic_ptr_impl_set_relaxed(P, V) (*(P) = (V))
+#endif
+#ifndef hb_atomic_ptr_impl_get_relaxed
+#define hb_atomic_ptr_impl_get_relaxed(P) (*(P))
+#endif
#ifndef hb_atomic_ptr_impl_get
inline void *hb_atomic_ptr_impl_get (void **P) { void *v = *P; _hb_memory_r_barrier (); return v; }
#endif
#define HB_ATOMIC_INT_INIT(V) {V}
struct hb_atomic_int_t
{
- inline void set_relaxed (int v_) { hb_atomic_int_impl_set_relaxed (&v, v_); }
+ inline void set_relaxed (int v_) const { hb_atomic_int_impl_set_relaxed (&v, v_); }
inline int get_relaxed (void) const { return hb_atomic_int_impl_get_relaxed (&v); }
inline int inc (void) { return hb_atomic_int_impl_add (&v, 1); }
inline int dec (void) { return hb_atomic_int_impl_add (&v, -1); }
};
-#define hb_atomic_ptr_get(P) hb_atomic_ptr_impl_get((void **) P)
-#define hb_atomic_ptr_cmpexch(P,O,N) hb_atomic_ptr_impl_cmpexch((P),(O),(N))
+template <typename T> struct hb_remove_ptr_t { typedef T value; };
+template <typename T> struct hb_remove_ptr_t<T *> { typedef T value; };
+
+#define HB_ATOMIC_PTR_INIT(V) {V}
+template <typename P>
+struct hb_atomic_ptr_t
+{
+ typedef typename hb_remove_ptr_t<P>::value T;
+
+ inline void init (T* v_ = nullptr) { set_relaxed (v_); }
+ inline void set_relaxed (T* v_) const { hb_atomic_ptr_impl_set_relaxed (&v, v_); }
+ inline T *get_relaxed (void) const { return hb_atomic_ptr_impl_get_relaxed (&v); }
+ inline T *get (void) const { return (T *) hb_atomic_ptr_impl_get ((void **) &v); }
+ inline bool cmpexch (const T *old, T *new_) const { return hb_atomic_ptr_impl_cmpexch (&v, old, new_); }
+
+ inline T* operator -> (void) const { return get (); }
+
+ mutable T *v;
+};
#endif /* HB_ATOMIC_PRIVATE_HH */
/* Thread-safe lock-free language list */
-static hb_language_item_t *langs;
+static hb_atomic_ptr_t <hb_language_item_t> langs;
#ifdef HB_USE_ATEXIT
static void
free_langs (void)
{
retry:
- hb_language_item_t *first_lang = (hb_language_item_t *) hb_atomic_ptr_get (&langs);
- if (!hb_atomic_ptr_cmpexch (&langs, first_lang, nullptr))
+ hb_language_item_t *first_lang = langs.get ();
+ if (unlikely (!langs.cmpexch (first_lang, nullptr)))
goto retry;
while (first_lang) {
lang_find_or_insert (const char *key)
{
retry:
- hb_language_item_t *first_lang = (hb_language_item_t *) hb_atomic_ptr_get (&langs);
+ hb_language_item_t *first_lang = langs.get ();
for (hb_language_item_t *lang = first_lang; lang; lang = lang->next)
if (*lang == key)
return nullptr;
}
- if (!hb_atomic_ptr_cmpexch (&langs, first_lang, lang)) {
+ if (unlikely (!langs.cmpexch (first_lang, lang)))
+ {
lang->fini ();
free (lang);
goto retry;
hb_language_t
hb_language_get_default (void)
{
- static hb_language_t default_language = HB_LANGUAGE_INVALID;
+ static hb_atomic_ptr_t <hb_language_t> default_language;
- hb_language_t language = (hb_language_t) hb_atomic_ptr_get (&default_language);
- if (unlikely (language == HB_LANGUAGE_INVALID)) {
+ hb_language_t language = default_language.get ();
+ if (unlikely (language == HB_LANGUAGE_INVALID))
+ {
language = hb_language_from_string (setlocale (LC_CTYPE, nullptr), -1);
- (void) hb_atomic_ptr_cmpexch (&default_language, HB_LANGUAGE_INVALID, language);
+ (void) default_language.cmpexch (HB_LANGUAGE_INVALID, language);
}
- return default_language;
+ return language;
}
#ifdef USE_XLOCALE
-static HB_LOCALE_T C_locale;
+static hb_atomic_ptr_t<HB_LOCALE_T> C_locale;
#ifdef HB_USE_ATEXIT
static void
free_C_locale (void)
{
retry:
- HB_LOCALE_T locale = (HB_LOCALE_T) hb_atomic_ptr_get (&C_locale);
+ HB_LOCALE_T locale = C_locale.get ();
- if (!hb_atomic_ptr_cmpexch (&C_locale, locale, nullptr))
+ if (unlikely (!C_locale.cmpexch (locale, nullptr)))
goto retry;
if (locale)
get_C_locale (void)
{
retry:
- HB_LOCALE_T C = (HB_LOCALE_T) hb_atomic_ptr_get (&C_locale);
+ HB_LOCALE_T C = C_locale.get ();
if (unlikely (!C))
{
C = HB_CREATE_LOCALE ("C");
- if (!hb_atomic_ptr_cmpexch (&C_locale, nullptr, C))
+ if (unlikely (!C_locale.cmpexch (nullptr, C)))
{
- HB_FREE_LOCALE (C_locale);
+ HB_FREE_LOCALE (C);
goto retry;
}
{
hb_shape_plan_t *shape_plan;
plan_node_t *next;
- } *shape_plans;
+ };
+ hb_atomic_ptr_t<plan_node_t> shape_plans;
inline hb_blob_t *reference_table (hb_tag_t tag) const
{
0, /* num_glyphs */
{
-#define HB_SHAPER_IMPLEMENT(shaper) HB_SHAPER_DATA_INVALID,
+#define HB_SHAPER_IMPLEMENT(shaper) HB_ATOMIC_PTR_INIT (HB_SHAPER_DATA_INVALID),
#include "hb-shaper-list.hh"
#undef HB_SHAPER_IMPLEMENT
},
- nullptr, /* shape_plans */
+ HB_ATOMIC_PTR_INIT (nullptr), /* shape_plans */
};
{
if (!hb_object_destroy (face)) return;
- for (hb_face_t::plan_node_t *node = face->shape_plans; node; )
+ for (hb_face_t::plan_node_t *node = face->shape_plans.get (); node; )
{
hb_face_t::plan_node_t *next = node->next;
hb_shape_plan_destroy (node->shape_plan);
nullptr, /* destroy */
{
-#define HB_SHAPER_IMPLEMENT(shaper) HB_SHAPER_DATA_INVALID,
+#define HB_SHAPER_IMPLEMENT(shaper) HB_ATOMIC_PTR_INIT (HB_SHAPER_DATA_INVALID),
#include "hb-shaper-list.hh"
#undef HB_SHAPER_IMPLEMENT
}
return true;
}
-static hb_font_funcs_t *static_ft_funcs = nullptr;
+static hb_atomic_ptr_t<hb_font_funcs_t> static_ft_funcs;
#ifdef HB_USE_ATEXIT
static
void free_static_ft_funcs (void)
{
retry:
- hb_font_funcs_t *ft_funcs = (hb_font_funcs_t *) hb_atomic_ptr_get (&static_ft_funcs);
- if (!hb_atomic_ptr_cmpexch (&static_ft_funcs, ft_funcs, nullptr))
+ hb_font_funcs_t *ft_funcs = static_ft_funcs.get ();
+ if (unlikely (!static_ft_funcs.cmpexch (ft_funcs, nullptr)))
goto retry;
hb_font_funcs_destroy (ft_funcs);
_hb_ft_font_set_funcs (hb_font_t *font, FT_Face ft_face, bool unref)
{
retry:
- hb_font_funcs_t *funcs = (hb_font_funcs_t *) hb_atomic_ptr_get (&static_ft_funcs);
+ hb_font_funcs_t *funcs = static_ft_funcs.get ();
if (unlikely (!funcs))
{
hb_font_funcs_make_immutable (funcs);
- if (!hb_atomic_ptr_cmpexch (&static_ft_funcs, nullptr, funcs)) {
+ if (unlikely (!static_ft_funcs.cmpexch (nullptr, funcs)))
+ {
hb_font_funcs_destroy (funcs);
goto retry;
}
/* Thread-safe, lock-free, FT_Library */
-static FT_Library ft_library;
+static hb_atomic_ptr_t<FT_Library> ft_library;
#ifdef HB_USE_ATEXIT
static
void free_ft_library (void)
{
retry:
- FT_Library library = (FT_Library) hb_atomic_ptr_get (&ft_library);
- if (!hb_atomic_ptr_cmpexch (&ft_library, library, nullptr))
+ FT_Library library = ft_library.get ();
+ if (unlikely (!ft_library.cmpexch (library, nullptr)))
goto retry;
FT_Done_FreeType (library);
get_ft_library (void)
{
retry:
- FT_Library library = (FT_Library) hb_atomic_ptr_get (&ft_library);
+ FT_Library library = ft_library.get ();
if (unlikely (!library))
{
if (FT_Init_FreeType (&library))
return nullptr;
- if (!hb_atomic_ptr_cmpexch (&ft_library, nullptr, library)) {
+ if (unlikely (!ft_library.cmpexch (nullptr, library)))
+ {
FT_Done_FreeType (library);
goto retry;
}
return utf8_decomposed_len;
}
-static hb_unicode_funcs_t *static_glib_funcs = nullptr;
+static hb_atomic_ptr_t<hb_unicode_funcs_t> static_glib_funcs;
#ifdef HB_USE_ATEXIT
static
void free_static_glib_funcs (void)
{
retry:
- hb_unicode_funcs_t *glib_funcs = (hb_unicode_funcs_t *) hb_atomic_ptr_get (&static_glib_funcs);
- if (!hb_atomic_ptr_cmpexch (&static_glib_funcs, glib_funcs, nullptr))
+ hb_unicode_funcs_t *glib_funcs = static_glib_funcs.get ();
+ if (unlikely (!static_glib_funcs.cmpexch (glib_funcs, nullptr)))
goto retry;
hb_unicode_funcs_destroy (glib_funcs);
hb_glib_get_unicode_funcs (void)
{
retry:
- hb_unicode_funcs_t *funcs = (hb_unicode_funcs_t *) hb_atomic_ptr_get (&static_glib_funcs);
+ hb_unicode_funcs_t *funcs = static_glib_funcs.get ();
if (unlikely (!funcs))
{
hb_unicode_funcs_make_immutable (funcs);
- if (!hb_atomic_ptr_cmpexch (&static_glib_funcs, nullptr, funcs)) {
+ if (unlikely (!static_glib_funcs.cmpexch (nullptr, funcs)))
+ {
hb_unicode_funcs_destroy (funcs);
goto retry;
}
* shaper face data
*/
-typedef struct hb_graphite2_tablelist_t {
+typedef struct hb_graphite2_tablelist_t
+{
struct hb_graphite2_tablelist_t *next;
hb_blob_t *blob;
unsigned int tag;
} hb_graphite2_tablelist_t;
-struct hb_graphite2_face_data_t {
+struct hb_graphite2_face_data_t
+{
hb_face_t *face;
gr_face *grface;
- hb_graphite2_tablelist_t *tlist;
+ hb_atomic_ptr_t<hb_graphite2_tablelist_t> tlist;
};
static const void *hb_graphite2_get_table (const void *data, unsigned int tag, size_t *len)
{
hb_graphite2_face_data_t *face_data = (hb_graphite2_face_data_t *) data;
- hb_graphite2_tablelist_t *tlist = face_data->tlist;
+ hb_graphite2_tablelist_t *tlist = face_data->tlist.get ();
hb_blob_t *blob = nullptr;
p->tag = tag;
retry:
- hb_graphite2_tablelist_t *tlist = (hb_graphite2_tablelist_t *) hb_atomic_ptr_get (&face_data->tlist);
+ hb_graphite2_tablelist_t *tlist = face_data->tlist.get ();
p->next = tlist;
- if (!hb_atomic_ptr_cmpexch (&face_data->tlist, tlist, p))
+ if (unlikely (!face_data->tlist.cmpexch (tlist, p)))
goto retry;
}
void
_hb_graphite2_shaper_face_data_destroy (hb_graphite2_face_data_t *data)
{
- hb_graphite2_tablelist_t *tlist = data->tlist;
+ hb_graphite2_tablelist_t *tlist = data->tlist.get ();
while (tlist)
{
}
#if U_ICU_VERSION_MAJOR_NUM >= 49
-static const UNormalizer2 *normalizer;
+static hb_atomic_ptr_t <const UNormalizer2> normalizer;
#endif
static hb_bool_t
{
#if U_ICU_VERSION_MAJOR_NUM >= 49
{
- UChar32 ret = unorm2_composePair (normalizer, a, b);
+ UChar32 ret = unorm2_composePair (normalizer.get (), a, b);
if (ret < 0) return false;
*ab = ret;
return true;
UChar decomposed[4];
int len;
UErrorCode icu_err = U_ZERO_ERROR;
- len = unorm2_getRawDecomposition (normalizer, ab, decomposed,
+ len = unorm2_getRawDecomposition (normalizer.get (), ab, decomposed,
ARRAY_LENGTH (decomposed), &icu_err);
if (U_FAILURE (icu_err) || len < 0) return false;
}
-static hb_unicode_funcs_t *static_icu_funcs = nullptr;
+static hb_atomic_ptr_t<hb_unicode_funcs_t> static_icu_funcs;
#ifdef HB_USE_ATEXIT
static
void free_static_icu_funcs (void)
{
retry:
- hb_unicode_funcs_t *icu_funcs = (hb_unicode_funcs_t *) hb_atomic_ptr_get (&static_icu_funcs);
- if (!hb_atomic_ptr_cmpexch (&static_icu_funcs, icu_funcs, nullptr))
+ hb_unicode_funcs_t *icu_funcs = static_icu_funcs.get ();
+ if (unlikely (!static_icu_funcs.cmpexch (icu_funcs, nullptr)))
goto retry;
hb_unicode_funcs_destroy (icu_funcs);
hb_icu_get_unicode_funcs (void)
{
retry:
- hb_unicode_funcs_t *funcs = (hb_unicode_funcs_t *) hb_atomic_ptr_get (&static_icu_funcs);
+ hb_unicode_funcs_t *funcs = static_icu_funcs.get ();
if (unlikely (!funcs))
{
#if U_ICU_VERSION_MAJOR_NUM >= 49
- if (!hb_atomic_ptr_get (&normalizer)) {
+ if (!normalizer.get ())
+ {
UErrorCode icu_err = U_ZERO_ERROR;
/* We ignore failure in getNFCInstace(). */
- (void) hb_atomic_ptr_cmpexch (&normalizer, nullptr, unorm2_getNFCInstance (&icu_err));
+ (void) normalizer.cmpexch (nullptr, unorm2_getNFCInstance (&icu_err));
}
#endif
hb_unicode_funcs_make_immutable (funcs);
- if (!hb_atomic_ptr_cmpexch (&static_icu_funcs, nullptr, funcs)) {
+ if (unlikely (!static_icu_funcs.cmpexch (nullptr, funcs)))
+ {
hb_unicode_funcs_destroy (funcs);
goto retry;
}
inline void init0 (void) {} /* Init, when memory is already set to 0. No-op for us. */
inline void init (void)
{
- instance = nullptr;
+ instance.set_relaxed (nullptr);
}
inline void fini (void)
{
- if (instance)
- thiz ()->destroy (instance);
+ Stored *p = instance.get ();
+ if (p)
+ thiz ()->destroy (p);
}
inline const Returned * operator -> (void) const { return thiz ()->get (); }
inline Stored * get_stored (void) const
{
retry:
- Stored *p = (Stored *) hb_atomic_ptr_get (&this->instance);
+ Stored *p = this->instance.get ();
if (unlikely (!p))
{
hb_face_t *face = *(((hb_face_t **) this) - WheresFace);
if (unlikely (!p))
p = thiz ()->create (nullptr); /* Produce nil object. */
assert (p);
- if (unlikely (!hb_atomic_ptr_cmpexch (const_cast<Stored **>(&this->instance), nullptr, p)))
+ if (unlikely (!this->instance.cmpexch (nullptr, p)))
{
thiz ()->destroy (p);
goto retry;
/* This *must* be called when there are no other threads accessing.
* However, to make TSan, etc, happy, we using cmpexch. */
retry:
- Stored *p = (Stored *) hb_atomic_ptr_get (&this->instance);
+ Stored *p = this->instance.get ();
if (p)
{
- if (unlikely (!hb_atomic_ptr_cmpexch (const_cast<Stored **>(&this->instance), p, instance_)))
+ if (unlikely (!this->instance.cmpexch (p, instance_)))
goto retry;
thiz ()->destroy (p);
}
private:
/* Must only have one pointer. */
- mutable Stored *instance;
+ hb_atomic_ptr_t<Stored *> instance;
};
/* Specializations. */
struct hb_reference_count_t
{
- hb_atomic_int_t ref_count;
+ mutable hb_atomic_int_t ref_count;
- inline void init (int v) { ref_count.set_relaxed (v); }
+ inline void init (int v = 1) { ref_count.set_relaxed (v); }
inline int get_relaxed (void) const { return ref_count.get_relaxed (); }
- inline int inc (void) { return ref_count.inc (); }
- inline int dec (void) { return ref_count.dec (); }
+ inline int inc (void) const { return ref_count.inc (); }
+ inline int dec (void) const { return ref_count.dec (); }
inline void fini (void) { ref_count.set_relaxed (HB_REFERENCE_COUNT_POISON_VALUE); }
inline bool is_inert (void) const { return ref_count.get_relaxed () == HB_REFERENCE_COUNT_INERT_VALUE; }
struct hb_object_header_t
{
hb_reference_count_t ref_count;
- mutable hb_user_data_array_t *user_data;
+ hb_atomic_ptr_t<hb_user_data_array_t> user_data;
-#define HB_OBJECT_HEADER_STATIC {HB_REFERENCE_COUNT_INIT, nullptr}
+#define HB_OBJECT_HEADER_STATIC {HB_REFERENCE_COUNT_INIT, HB_ATOMIC_PTR_INIT (nullptr)}
private:
ASSERT_POD ();
template <typename Type>
static inline void hb_object_init (Type *obj)
{
- obj->header.ref_count.init (1);
- obj->header.user_data = nullptr;
+ obj->header.ref_count.init ();
+ obj->header.user_data.init ();
}
template <typename Type>
static inline bool hb_object_is_inert (const Type *obj)
static inline void hb_object_fini (Type *obj)
{
obj->header.ref_count.fini (); /* Do this before user_data */
- if (obj->header.user_data)
+ hb_user_data_array_t *user_data = obj->header.user_data.get ();
+ if (user_data)
{
- obj->header.user_data->fini ();
- free (obj->header.user_data);
+ user_data->fini ();
+ free (user_data);
}
}
template <typename Type>
assert (hb_object_is_valid (obj));
retry:
- hb_user_data_array_t *user_data = (hb_user_data_array_t *) hb_atomic_ptr_get (&obj->header.user_data);
+ hb_user_data_array_t *user_data = obj->header.user_data.get ();
if (unlikely (!user_data))
{
user_data = (hb_user_data_array_t *) calloc (sizeof (hb_user_data_array_t), 1);
if (unlikely (!user_data))
return false;
user_data->init ();
- if (unlikely (!hb_atomic_ptr_cmpexch (&obj->header.user_data, nullptr, user_data)))
+ if (unlikely (!obj->header.user_data.cmpexch (nullptr, user_data)))
{
user_data->fini ();
free (user_data);
static inline void *hb_object_get_user_data (Type *obj,
hb_user_data_key_t *key)
{
- if (unlikely (!obj || hb_object_is_inert (obj) || !obj->header.user_data))
+ if (unlikely (!obj || hb_object_is_inert (obj) || !obj->header.user_data.get ()))
return nullptr;
assert (hb_object_is_valid (obj));
return obj->header.user_data->get (key);
return ot_font->v_metrics.has_font_extents;
}
-static hb_font_funcs_t *static_ot_funcs = nullptr;
+static hb_atomic_ptr_t <hb_font_funcs_t> static_ot_funcs;
#ifdef HB_USE_ATEXIT
static
void free_static_ot_funcs (void)
{
retry:
- hb_font_funcs_t *ot_funcs = (hb_font_funcs_t *) hb_atomic_ptr_get (&static_ot_funcs);
- if (!hb_atomic_ptr_cmpexch (&static_ot_funcs, ot_funcs, nullptr))
+ hb_font_funcs_t *ot_funcs = static_ot_funcs.get ();
+ if (unlikely (!static_ot_funcs.cmpexch (ot_funcs, nullptr)))
goto retry;
hb_font_funcs_destroy (ot_funcs);
_hb_ot_get_font_funcs (void)
{
retry:
- hb_font_funcs_t *funcs = (hb_font_funcs_t *) hb_atomic_ptr_get (&static_ot_funcs);
+ hb_font_funcs_t *funcs = static_ot_funcs.get ();
if (unlikely (!funcs))
{
hb_font_funcs_make_immutable (funcs);
- if (!hb_atomic_ptr_cmpexch (&static_ot_funcs, nullptr, funcs)) {
+ if (unlikely (!static_ot_funcs.cmpexch (nullptr, funcs)))
+ {
hb_font_funcs_destroy (funcs);
goto retry;
}
_hb_ot_layout_destroy (hb_ot_layout_t *layout);
-#define hb_ot_layout_from_face(face) ((hb_ot_layout_t *) face->shaper_data.ot)
+#define hb_ot_layout_from_face(face) ((hb_ot_layout_t *) face->shaper_data.ot.get_relaxed ())
/*
inline void fini (void)
{
index_to_offset.fini ();
- free (gids_sorted_by_name);
+ free (gids_sorted_by_name.get ());
}
inline bool get_glyph_name (hb_codepoint_t glyph,
return false;
retry:
- uint16_t *gids = (uint16_t *) hb_atomic_ptr_get (&gids_sorted_by_name);
+ uint16_t *gids = gids_sorted_by_name.get ();
if (unlikely (!gids))
{
gids[i] = i;
hb_sort_r (gids, count, sizeof (gids[0]), cmp_gids, (void *) this);
- if (!hb_atomic_ptr_cmpexch (&gids_sorted_by_name, nullptr, gids)) {
+ if (unlikely (!gids_sorted_by_name.cmpexch (nullptr, gids)))
+ {
free (gids);
goto retry;
}
const ArrayOf<HBUINT16> *glyphNameIndex;
hb_vector_t<uint32_t, 1> index_to_offset;
const uint8_t *pool;
- mutable uint16_t *gids_sorted_by_name;
+ hb_atomic_ptr_t<uint16_t *> gids_sorted_by_name;
};
public:
* mask_array[NONE] == 0. */
hb_mask_t mask_array[ARABIC_NUM_FEATURES + 1];
- mutable arabic_fallback_plan_t *fallback_plan;
+ hb_atomic_ptr_t<arabic_fallback_plan_t> fallback_plan;
unsigned int do_fallback : 1;
unsigned int has_stch : 1;
{
arabic_shape_plan_t *arabic_plan = (arabic_shape_plan_t *) data;
- arabic_fallback_plan_destroy (arabic_plan->fallback_plan);
+ arabic_fallback_plan_destroy (arabic_plan->fallback_plan.get ());
free (data);
}
return;
retry:
- arabic_fallback_plan_t *fallback_plan = (arabic_fallback_plan_t *) hb_atomic_ptr_get (&arabic_plan->fallback_plan);
+ arabic_fallback_plan_t *fallback_plan = arabic_plan->fallback_plan.get ();
if (unlikely (!fallback_plan))
{
/* This sucks. We need a font to build the fallback plan... */
fallback_plan = arabic_fallback_plan_create (plan, font);
- if (unlikely (!hb_atomic_ptr_cmpexch (&(const_cast<arabic_shape_plan_t *> (arabic_plan))->fallback_plan, nullptr, fallback_plan))) {
+ if (unlikely (!arabic_plan->fallback_plan.cmpexch (nullptr, fallback_plan)))
+ {
arabic_fallback_plan_destroy (fallback_plan);
goto retry;
}
#define HB_SHAPER_PLAN(shaper) \
HB_STMT_START { \
- if (hb_##shaper##_shaper_face_data_ensure (shape_plan->face_unsafe)) { \
- HB_SHAPER_DATA (shaper, shape_plan) = \
+ if (hb_##shaper##_shaper_face_data_ensure (shape_plan->face_unsafe)) \
+ { \
+ /* XXX-MT-bug What happened to *ensure*ing this?!!!! */ \
+ HB_SHAPER_DATA (shaper, shape_plan).set_relaxed ( \
HB_SHAPER_DATA_CREATE_FUNC (shaper, shape_plan) (shape_plan, \
user_features, num_user_features, \
- coords, num_coords); \
+ coords, num_coords)); \
shape_plan->shaper_func = _hb_##shaper##_shape; \
shape_plan->shaper_name = #shaper; \
return; \
0, /* num_coords */
{
-#define HB_SHAPER_IMPLEMENT(shaper) HB_SHAPER_DATA_INVALID,
+#define HB_SHAPER_IMPLEMENT(shaper) HB_ATOMIC_PTR_INIT (HB_SHAPER_DATA_INVALID),
#include "hb-shaper-list.hh"
#undef HB_SHAPER_IMPLEMENT
- }
+ },
};
#define HB_SHAPER_EXECUTE(shaper) \
HB_STMT_START { \
- return HB_SHAPER_DATA (shaper, shape_plan) && \
+ return HB_SHAPER_DATA (shaper, shape_plan).get () && \
hb_##shaper##_shaper_font_data_ensure (font) && \
_hb_##shaper##_shape (shape_plan, font, buffer, features, num_features); \
} HB_STMT_END
retry:
- hb_face_t::plan_node_t *cached_plan_nodes = (hb_face_t::plan_node_t *) hb_atomic_ptr_get (&face->shape_plans);
+ hb_face_t::plan_node_t *cached_plan_nodes = face->shape_plans.get ();
/* Don't look for plan in the cache if there were variation coordinates XXX Fix me. */
if (!hb_coords_present (coords, num_coords))
node->shape_plan = shape_plan;
node->next = cached_plan_nodes;
- if (!hb_atomic_ptr_cmpexch (&face->shape_plans, cached_plan_nodes, node)) {
+ if (unlikely (!face->shape_plans.cmpexch (cached_plan_nodes, node)))
+ {
hb_shape_plan_destroy (shape_plan);
free (node);
goto retry;
* contains the output glyphs and their positions.
**/
-static const char **static_shaper_list;
+static hb_atomic_ptr_t <const char **> static_shaper_list;
#ifdef HB_USE_ATEXIT
static
void free_static_shaper_list (void)
{
retry:
- const char **shaper_list = (const char **) hb_atomic_ptr_get (&static_shaper_list);
- if (!hb_atomic_ptr_cmpexch (&static_shaper_list, shaper_list, nullptr))
+ const char **shaper_list = static_shaper_list.get ();
+ if (unlikely (!static_shaper_list.cmpexch (shaper_list, nullptr)))
goto retry;
free (shaper_list);
hb_shape_list_shapers (void)
{
retry:
- const char **shaper_list = (const char **) hb_atomic_ptr_get (&static_shaper_list);
+ const char **shaper_list = static_shaper_list.get ();
if (unlikely (!shaper_list))
{
shaper_list[i] = shapers[i].name;
shaper_list[i] = nullptr;
- if (!hb_atomic_ptr_cmpexch (&static_shaper_list, nullptr, shaper_list)) {
+ if (unlikely (!static_shaper_list.cmpexch (nullptr, shaper_list)))
+ {
free (shaper_list);
goto retry;
}
#ifdef HB_SHAPER
-#define HB_SHAPER_DATA_GET(object) HB_SHAPER_DATA (HB_SHAPER, object)
+#define HB_SHAPER_DATA_GET(object) HB_SHAPER_DATA (HB_SHAPER, object).get ()
#endif
#define HB_SHAPER_DATA_TYPE_NAME(shaper, object) hb_##shaper##_##object##_data_t
#define HB_SHAPER_DATA_TYPE(shaper, object) struct HB_SHAPER_DATA_TYPE_NAME(shaper, object)
-#define HB_SHAPER_DATA_INSTANCE(shaper, object, instance) (* (HB_SHAPER_DATA_TYPE(shaper, object) **) &(instance)->shaper_data.shaper)
+#define HB_SHAPER_DATA_INSTANCE(shaper, object, instance) (* reinterpret_cast<hb_atomic_ptr_t<HB_SHAPER_DATA_TYPE(shaper, object) *> *> (&(instance)->shaper_data.shaper))
#define HB_SHAPER_DATA(shaper, object) HB_SHAPER_DATA_INSTANCE(shaper, object, object)
#define HB_SHAPER_DATA_CREATE_FUNC(shaper, object) _hb_##shaper##_shaper_##object##_data_create
#define HB_SHAPER_DATA_DESTROY_FUNC(shaper, object) _hb_##shaper##_shaper_##object##_data_destroy
HB_SHAPER_DATA_ENSURE_FUNC (shaper, object) (hb_##object##_t *object)
#define HB_SHAPER_DATA_DESTROY(shaper, object) \
- if (HB_SHAPER_DATA_TYPE (shaper, object) *data = HB_SHAPER_DATA (shaper, object)) \
+ if (HB_SHAPER_DATA_TYPE (shaper, object) *data = HB_SHAPER_DATA (shaper, object).get ()) \
if (data != HB_SHAPER_DATA_INVALID && data != HB_SHAPER_DATA_SUCCEEDED) \
HB_SHAPER_DATA_DESTROY_FUNC (shaper, object) (data);
HB_SHAPER_DATA_ENSURE_FUNC(shaper, object) (hb_##object##_t *object) \
{\
retry: \
- HB_SHAPER_DATA_TYPE (shaper, object) *data = (HB_SHAPER_DATA_TYPE (shaper, object) *) hb_atomic_ptr_get (&HB_SHAPER_DATA (shaper, object)); \
+ HB_SHAPER_DATA_TYPE (shaper, object) *data = HB_SHAPER_DATA (shaper, object).get (); \
if (likely (data) && !(condition)) { \
- /* Note that evaluating condition above can be dangerous if another thread \
+ /* XXX-MT-bug \
+ * Note that evaluating condition above can be dangerous if another thread \
* got here first and destructed data. That's, as always, bad use pattern. \
* If you modify the font (change font size), other threads must not be \
* using it at the same time. However, since this check is delayed to \
/* Drop and recreate. */ \
/* If someone dropped it in the mean time, throw it away and don't touch it. \
* Otherwise, destruct it. */ \
- if (hb_atomic_ptr_cmpexch (&HB_SHAPER_DATA (shaper, object), data, nullptr)) { \
+ if (likely (HB_SHAPER_DATA (shaper, object).cmpexch (data, nullptr))) \
+ { \
HB_SHAPER_DATA_DESTROY_FUNC (shaper, object) (data); \
} \
goto retry; \
data = HB_SHAPER_DATA_CREATE_FUNC (shaper, object) (object); \
if (unlikely (!data)) \
data = (HB_SHAPER_DATA_TYPE (shaper, object) *) HB_SHAPER_DATA_INVALID; \
- if (!hb_atomic_ptr_cmpexch (&HB_SHAPER_DATA (shaper, object), nullptr, data)) { \
+ if (unlikely (!HB_SHAPER_DATA (shaper, object).cmpexch (nullptr, data))) { \
if (data && \
data != HB_SHAPER_DATA_INVALID && \
data != HB_SHAPER_DATA_SUCCEEDED) \
/* For embedding in face / font / ... */
struct hb_shaper_data_t {
-#define HB_SHAPER_IMPLEMENT(shaper) void *shaper;
+#define HB_SHAPER_IMPLEMENT(shaper) hb_atomic_ptr_t<void *> shaper;
#include "hb-shaper-list.hh"
#undef HB_SHAPER_IMPLEMENT
};
/* Thread-safe, lock-free, shapers */
-static const hb_shaper_pair_t *static_shapers;
+static hb_atomic_ptr_t<const hb_shaper_pair_t> static_shapers;
#ifdef HB_USE_ATEXIT
static
void free_static_shapers (void)
{
retry:
- hb_shaper_pair_t *shapers = (hb_shaper_pair_t *) hb_atomic_ptr_get (&static_shapers);
- if (!hb_atomic_ptr_cmpexch (&static_shapers, shapers, nullptr))
+ const hb_shaper_pair_t *shapers = static_shapers.get ();
+ if (unlikely (!static_shapers.cmpexch (shapers, nullptr)))
goto retry;
if (unlikely (shapers != all_shapers))
_hb_shapers_get (void)
{
retry:
- hb_shaper_pair_t *shapers = (hb_shaper_pair_t *) hb_atomic_ptr_get (&static_shapers);
+ hb_shaper_pair_t *shapers = const_cast<hb_shaper_pair_t *> (static_shapers.get ());
if (unlikely (!shapers))
{
char *env = getenv ("HB_SHAPER_LIST");
if (!env || !*env) {
- (void) hb_atomic_ptr_cmpexch (&static_shapers, nullptr, &all_shapers[0]);
+ (void) static_shapers.cmpexch (nullptr, &all_shapers[0]);
return (const hb_shaper_pair_t *) all_shapers;
}
/* Not found; allocate one. */
shapers = (hb_shaper_pair_t *) calloc (1, sizeof (all_shapers));
- if (unlikely (!shapers)) {
- (void) hb_atomic_ptr_cmpexch (&static_shapers, nullptr, &all_shapers[0]);
+ if (unlikely (!shapers))
+ {
+ (void) static_shapers.cmpexch (nullptr, &all_shapers[0]);
return (const hb_shaper_pair_t *) all_shapers;
}
p = end + 1;
}
- if (!hb_atomic_ptr_cmpexch (&static_shapers, nullptr, shapers)) {
+ if (unlikely (!static_shapers.cmpexch (nullptr, shapers)))
+ {
free (shapers);
goto retry;
}
return ucdn_compat_decompose(u, decomposed);
}
-static hb_unicode_funcs_t *static_ucdn_funcs = nullptr;
+static hb_atomic_ptr_t<hb_unicode_funcs_t> static_ucdn_funcs;
#ifdef HB_USE_ATEXIT
static
void free_static_ucdn_funcs (void)
{
retry:
- hb_unicode_funcs_t *ucdn_funcs = (hb_unicode_funcs_t *) hb_atomic_ptr_get (&static_ucdn_funcs);
- if (!hb_atomic_ptr_cmpexch (&static_ucdn_funcs, ucdn_funcs, nullptr))
+ hb_unicode_funcs_t *ucdn_funcs = static_ucdn_funcs.get ();
+ if (unlikely (!static_ucdn_funcs.cmpexch (ucdn_funcs, nullptr)))
goto retry;
hb_unicode_funcs_destroy (ucdn_funcs);
hb_ucdn_get_unicode_funcs (void)
{
retry:
- hb_unicode_funcs_t *funcs = (hb_unicode_funcs_t *) hb_atomic_ptr_get (&static_ucdn_funcs);
+ hb_unicode_funcs_t *funcs = static_ucdn_funcs.get ();
if (unlikely (!funcs))
{
hb_unicode_funcs_make_immutable (funcs);
- if (!hb_atomic_ptr_cmpexch (&static_ucdn_funcs, nullptr, funcs)) {
+ if (unlikely (!static_ucdn_funcs.cmpexch (nullptr, funcs)))
+ {
hb_unicode_funcs_destroy (funcs);
goto retry;
}
}
}
};
-static hb_uniscribe_shaper_funcs_t *uniscribe_funcs;
+static hb_atomic_ptr_t<hb_uniscribe_shaper_funcs_t> uniscribe_funcs;
#ifdef HB_USE_ATEXIT
static inline void
free_uniscribe_funcs (void)
{
retry:
- hb_uniscribe_shaper_funcs_t *local_uniscribe_funcs =
- (hb_uniscribe_shaper_funcs_t *) hb_atomic_ptr_get (&uniscribe_funcs);
- if (!hb_atomic_ptr_cmpexch (&uniscribe_funcs, local_uniscribe_funcs, nullptr))
+ hb_uniscribe_shaper_funcs_t *local_uniscribe_funcs = uniscribe_funcs.get ();
+ if (unlikely (!uniscribe_funcs.cmpexch (local_uniscribe_funcs, nullptr)))
goto retry;
- free (uniscribe_funcs);
+ free (local_uniscribe_funcs);
}
#endif
hb_uniscribe_shaper_get_funcs (void)
{
retry:
- hb_uniscribe_shaper_funcs_t *funcs = (hb_uniscribe_shaper_funcs_t *) hb_atomic_ptr_get (&uniscribe_funcs);
+ hb_uniscribe_shaper_funcs_t *funcs = uniscribe_funcs.get ();
if (unlikely (!funcs))
{
funcs->init ();
- if (!hb_atomic_ptr_cmpexch (&uniscribe_funcs, nullptr, funcs)) {
+ if (unlikely (!uniscribe_funcs.cmpexch (nullptr, funcs)))
+ {
free (funcs);
goto retry;
}