2 * Copyright © 2007,2008,2009,2010 Red Hat, Inc.
3 * Copyright © 2012 Google, Inc.
5 * This is part of HarfBuzz, a text shaping library.
7 * Permission is hereby granted, without written agreement and without
8 * license or royalty fees, to use, copy, modify, and distribute this
9 * software and its documentation for any purpose, provided that the
10 * above copyright notice and the following two paragraphs appear in
11 * all copies of this software.
13 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
14 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
15 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
16 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
19 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
20 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
21 * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
22 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
23 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
25 * Red Hat Author(s): Behdad Esfahbod
26 * Google Author(s): Behdad Esfahbod
29 #ifndef HB_OPEN_TYPE_PRIVATE_HH
30 #define HB_OPEN_TYPE_PRIVATE_HH
32 #include "hb-private.hh"
33 #include "hb-debug.hh"
34 #include "hb-blob-private.hh"
35 #include "hb-face-private.hh"
/* CastR<T>(x): reinterpret a reference to any object as a reference to T.
 * No copy, no conversion — a pure reinterpret_cast on the referent. */
template<typename Type, typename TObject>
static inline const Type& CastR (const TObject &X)
{
  return reinterpret_cast<const Type &> (X);
}
template<typename Type, typename TObject>
static inline Type& CastR (TObject &X)
{
  return reinterpret_cast<Type &> (X);
}
/* CastP<T>(p): reinterpret a pointer to any object as a pointer to T.
 * Pointer-to-pointer counterpart of CastR. */
template<typename Type, typename TObject>
static inline const Type* CastP (const TObject *X)
{
  return reinterpret_cast<const Type *> (X);
}
template<typename Type, typename TObject>
static inline Type* CastP (TObject *X)
{
  return reinterpret_cast<Type *> (X);
}
/* StructAtOffset<T>(P, offset): the T& located `offset` bytes past the
 * memory location P points to.  Caller guarantees the bytes are valid. */
template<typename Type>
static inline const Type& StructAtOffset (const void *P, unsigned int offset)
{
  const char *base = reinterpret_cast<const char *> (P);
  return *reinterpret_cast<const Type *> (base + offset);
}
template<typename Type>
static inline Type& StructAtOffset (void *P, unsigned int offset)
{
  char *base = reinterpret_cast<char *> (P);
  return *reinterpret_cast<Type *> (base + offset);
}
/* StructAfter<T>(X): the T& placed immediately after X in memory.
 * X may be variable-sized; it must implement get_size (). */
template<typename Type, typename TObject>
static inline const Type& StructAfter (const TObject &X)
{
  const char *p = (const char *) &X;
  return *reinterpret_cast<const Type *> (p + X.get_size ());
}
template<typename Type, typename TObject>
static inline Type& StructAfter (TObject &X)
{
  char *p = (char *) &X;
  return *reinterpret_cast<Type *> (p + X.get_size ());
}
/* NOTE(review): this excerpt appears to have lines elided (the macros'
 * brace lines are missing); code text preserved verbatim, comments only. */
86 /* Check _assertion in a method environment */
/* Defines a per-line, compile-time assertion method; the __LINE__ indirection
 * below gives each instantiation a unique name so several can coexist. */
87 #define _DEFINE_INSTANCE_ASSERTION1(_line, _assertion) \
88 inline void _instance_assertion_on_line_##_line (void) const \
90 static_assert ((_assertion), ""); \
91 ASSERT_INSTANCE_POD (*this); /* Make sure it's POD. */ \
93 # define _DEFINE_INSTANCE_ASSERTION0(_line, _assertion) _DEFINE_INSTANCE_ASSERTION1 (_line, _assertion)
94 # define DEFINE_INSTANCE_ASSERTION(_assertion) _DEFINE_INSTANCE_ASSERTION0 (__LINE__, _assertion)
96 /* Check that _code compiles in a method environment */
97 #define _DEFINE_COMPILES_ASSERTION1(_line, _code) \
98 inline void _compiles_assertion_on_line_##_line (void) const \
100 # define _DEFINE_COMPILES_ASSERTION0(_line, _code) _DEFINE_COMPILES_ASSERTION1 (_line, _code)
101 # define DEFINE_COMPILES_ASSERTION(_code) _DEFINE_COMPILES_ASSERTION0 (__LINE__, _code)
/* DEFINE_SIZE_*: each struct declares its wire size with one of these.
 * static_size = exact byte size; min_size = smallest legal size (for
 * variable-sized structs with trailing arrays). */
104 #define DEFINE_SIZE_STATIC(size) \
105 DEFINE_INSTANCE_ASSERTION (sizeof (*this) == (size)); \
106 static const unsigned int static_size = (size); \
107 static const unsigned int min_size = (size); \
108 inline unsigned int get_size (void) const { return (size); }
/* Union: size is that of the named member (plus a no-op sizeof check that
 * the member has a static_size). */
110 #define DEFINE_SIZE_UNION(size, _member) \
111 DEFINE_INSTANCE_ASSERTION (0*sizeof(this->u._member.static_size) + sizeof(this->u._member) == (size)); \
112 static const unsigned int min_size = (size)
114 #define DEFINE_SIZE_MIN(size) \
115 DEFINE_INSTANCE_ASSERTION (sizeof (*this) >= (size)); \
116 static const unsigned int min_size = (size)
/* Struct whose last member is a flexible array: header size + one element. */
118 #define DEFINE_SIZE_ARRAY(size, array) \
119 DEFINE_INSTANCE_ASSERTION (sizeof (*this) == (size) + sizeof (array[0])); \
120 DEFINE_COMPILES_ASSERTION ((void) array[0].static_size) \
121 static const unsigned int min_size = (size)
/* Same, for structs carrying two trailing arrays. */
123 #define DEFINE_SIZE_ARRAY2(size, array1, array2) \
124 DEFINE_INSTANCE_ASSERTION (sizeof (*this) == (size) + sizeof (this->array1[0]) + sizeof (this->array2[0])); \
125 DEFINE_COMPILES_ASSERTION ((void) array1[0].static_size; (void) array2[0].static_size) \
126 static const unsigned int min_size = (size)
/* Base for dispatch contexts (sanitize, serialize, closure, ...): carries the
 * context's return type and debug depth.  may_dispatch defaults to true;
 * derived contexts override it to gate dispatch (e.g. on format sanity).
 * NOTE(review): braces/closing lines appear elided from this excerpt. */
134 template <typename Context, typename Return, unsigned int MaxDebugDepth>
135 struct hb_dispatch_context_t
137 static const unsigned int max_debug_depth = MaxDebugDepth;
138 typedef Return return_t;
139 template <typename T, typename F>
140 inline bool may_dispatch (const T *obj, const F *format) { return true; }
/* Value returned when may_dispatch() refuses; defers to the derived context. */
141 static return_t no_dispatch_return_value (void) { return Context::default_return_value (); }
149 /* This limits sanitizing time on really broken fonts. */
/* Overridable build-time limits.  NOTE(review): the matching #endif lines
 * appear to be elided from this excerpt. */
/* Max number of in-place edits (offset neuterings) before giving up. */
150 #ifndef HB_SANITIZE_MAX_EDITS
151 #define HB_SANITIZE_MAX_EDITS 32
/* Operation budget = blob size * factor, floored at the MIN value below. */
153 #ifndef HB_SANITIZE_MAX_OPS_FACTOR
154 #define HB_SANITIZE_MAX_OPS_FACTOR 8
156 #ifndef HB_SANITIZE_MAX_OPS_MIN
157 #define HB_SANITIZE_MAX_OPS_MIN 16384
/* Sanitization context: validates font-table structures inside a blob by
 * bounds-checking every access against [start, end).  Dispatch return type
 * is bool (valid / invalid).  NOTE(review): this excerpt has lines elided
 * (braces, some returns, the mutable max_ops member declaration); the code
 * text below is preserved verbatim. */
160 struct hb_sanitize_context_t :
161 hb_dispatch_context_t<hb_sanitize_context_t, bool, HB_DEBUG_SANITIZE>
163 inline hb_sanitize_context_t (void) :
165 start (nullptr), end (nullptr),
166 writable (false), edit_count (0), max_ops (0),
170 inline const char *get_name (void) { return "SANITIZE"; }
/* Only dispatch into a subtable if its format field itself sanitizes. */
171 template <typename T, typename F>
172 inline bool may_dispatch (const T *obj, const F *format)
173 { return format->sanitize (this); }
174 template <typename T>
175 inline return_t dispatch (const T &obj) { return obj.sanitize (this); }
176 static return_t default_return_value (void) { return true; }
177 static return_t no_dispatch_return_value (void) { return false; }
178 bool stop_sublookup_iteration (const return_t r) const { return !r; }
/* Take a reference on the blob to sanitize; actual bounds are set up
 * later by start_processing(). */
180 inline void init (hb_blob_t *b)
182 this->blob = hb_blob_reference (b);
183 this->writable = false;
/* Capture [start, end) from the blob and reset the op/edit budgets. */
186 inline void start_processing (void)
188 this->start = hb_blob_get_data (this->blob, nullptr);
189 this->end = this->start + this->blob->length;
190 assert (this->start <= this->end); /* Must not overflow. */
191 this->max_ops = MAX ((unsigned int) (this->end - this->start) * HB_SANITIZE_MAX_OPS_FACTOR,
192 (unsigned) HB_SANITIZE_MAX_OPS_MIN);
193 this->edit_count = 0;
194 this->debug_depth = 0;
196 DEBUG_MSG_LEVEL (SANITIZE, start, 0, +1,
197 "start [%p..%p] (%lu bytes)",
198 this->start, this->end,
199 (unsigned long) (this->end - this->start));
/* Drop the blob reference and clear the bounds. */
202 inline void end_processing (void)
204 DEBUG_MSG_LEVEL (SANITIZE, this->start, 0, -1,
205 "end [%p..%p] %u edit requests",
206 this->start, this->end, this->edit_count);
208 hb_blob_destroy (this->blob);
209 this->blob = nullptr;
210 this->start = this->end = nullptr;
/* Core bounds check: [base, base+len) must lie inside [start, end), and
 * the op budget must not be exhausted (max_ops-- caps total work on
 * maliciously self-referential fonts). */
213 inline bool check_range (const void *base, unsigned int len) const
215 const char *p = (const char *) base;
216 bool ok = this->max_ops-- > 0 &&
219 (unsigned int) (this->end - p) >= len;
221 DEBUG_MSG_LEVEL (SANITIZE, p, this->debug_depth+1, 0,
222 "check_range [%p..%p] (%d bytes) in [%p..%p] -> %s",
224 this->start, this->end,
225 ok ? "OK" : "OUT-OF-RANGE");
/* Range check for len records of record_size bytes, guarding the
 * multiplication against unsigned overflow first. */
230 inline bool check_array (const void *base, unsigned int record_size, unsigned int len) const
232 const char *p = (const char *) base;
233 bool overflows = _hb_unsigned_int_mul_overflows (len, record_size);
234 unsigned int array_size = record_size * len;
235 bool ok = !overflows && this->check_range (base, array_size);
237 DEBUG_MSG_LEVEL (SANITIZE, p, this->debug_depth+1, 0,
238 "check_array [%p..%p] (%d*%d=%d bytes) in [%p..%p] -> %s",
239 p, p + (record_size * len), record_size, len, (unsigned int) array_size,
240 this->start, this->end,
241 overflows ? "OVERFLOWS" : ok ? "OK" : "OUT-OF-RANGE");
/* Check that obj's minimum (fixed-header) size is in range. */
246 template <typename Type>
247 inline bool check_struct (const Type *obj) const
249 return likely (this->check_range (obj, obj->min_size));
/* Ask permission to edit [base, base+len); counts toward the
 * HB_SANITIZE_MAX_EDITS cap and requires a writable blob. */
252 inline bool may_edit (const void *base, unsigned int len)
254 if (this->edit_count >= HB_SANITIZE_MAX_EDITS)
257 const char *p = (const char *) base;
260 DEBUG_MSG_LEVEL (SANITIZE, p, this->debug_depth+1, 0,
261 "may_edit(%u) [%p..%p] (%d bytes) in [%p..%p] -> %s",
264 this->start, this->end,
265 this->writable ? "GRANTED" : "DENIED");
267 return this->writable;
/* Edit helper used by OffsetTo::neuter(): write v into obj if allowed. */
270 template <typename Type, typename ValueType>
271 inline bool try_set (const Type *obj, const ValueType &v) {
272 if (this->may_edit (obj, obj->static_size)) {
273 const_cast<Type *> (obj)->set (v);
279 mutable unsigned int debug_depth;
280 const char *start, *end;
282 unsigned int edit_count;
285 unsigned int num_glyphs;
290 /* Template to sanitize an object. */
/* Sanitizer<Type>::sanitize(blob) validates the blob as a Type table:
 * first a read-only pass; if edits were requested, the blob is made
 * writable and sanitized again (second pass must request no further
 * edits).  On failure the blob is destroyed and the empty blob returned.
 * NOTE(review): the `struct Sanitizer` declaration line and several body
 * lines (braces, returns) appear elided from this excerpt; text preserved
 * verbatim. */
291 template <typename Type>
294 inline Sanitizer (void) {}
296 inline hb_blob_t *sanitize (hb_blob_t *blob) {
299 /* TODO is_sane() stuff */
304 DEBUG_MSG_FUNC (SANITIZE, c->start, "start");
306 c->start_processing ();
308 if (unlikely (!c->start)) {
309 c->end_processing ();
313 Type *t = CastP<Type> (const_cast<char *> (c->start));
/* First (read-only) pass. */
315 sane = t->sanitize (c);
318 DEBUG_MSG_FUNC (SANITIZE, c->start, "passed first round with %d edits; going for second round", c->edit_count);
320 /* sanitize again to ensure no toe-stepping */
/* Second pass: any edit requested here means the table is unstable. */
322 sane = t->sanitize (c);
/* NOTE(review): "FAILLING" typo is in the original debug string; it is
 * runtime text and deliberately left untouched here. */
324 DEBUG_MSG_FUNC (SANITIZE, c->start, "requested %d edits in second round; FAILLING", c->edit_count);
/* Edits were requested on a read-only blob: relocate to writable memory
 * and retry. */
329 unsigned int edit_count = c->edit_count;
330 if (edit_count && !c->writable) {
331 c->start = hb_blob_get_data_writable (blob, nullptr);
332 c->end = c->start + blob->length;
336 /* ok, we made it writable by relocating. try again */
337 DEBUG_MSG_FUNC (SANITIZE, c->start, "retry");
343 c->end_processing ();
345 DEBUG_MSG_FUNC (SANITIZE, c->start, sane ? "PASSED" : "FAILED");
/* Failure path: consume the input blob, hand back the shared empty blob. */
353 hb_blob_destroy (blob);
354 return hb_blob_get_empty ();
358 inline void set_num_glyphs (unsigned int num_glyphs) { c->num_glyphs = num_glyphs; }
/* One-element array gives pointer semantics (c->...) on an embedded member. */
361 hb_sanitize_context_t c[1];
/* Serialization context: writes structures into a caller-provided buffer
 * [start, end), bump-allocating from `head`.  Once allocation fails,
 * ran_out_of_room latches and all later allocations fail too.
 * NOTE(review): lines (braces, some returns) appear elided from this
 * excerpt; code text preserved verbatim. */
371 struct hb_serialize_context_t
373 inline hb_serialize_context_t (void *start_, unsigned int size)
375 this->start = (char *) start_;
376 this->end = this->start + size;
378 this->ran_out_of_room = false;
379 this->head = this->start;
380 this->debug_depth = 0;
383 template <typename Type>
384 inline Type *start_serialize (void)
386 DEBUG_MSG_LEVEL (SERIALIZE, this->start, 0, +1,
387 "start [%p..%p] (%lu bytes)",
388 this->start, this->end,
389 (unsigned long) (this->end - this->start));
391 return start_embed<Type> ();
394 inline void end_serialize (void)
396 DEBUG_MSG_LEVEL (SERIALIZE, this->start, 0, -1,
397 "end [%p..%p] serialized %d bytes; %s",
398 this->start, this->end,
399 (int) (this->head - this->start),
400 this->ran_out_of_room ? "RAN OUT OF ROOM" : "did not ran out of room")
/* malloc a copy of everything serialized so far; caller owns the result.
 * NOTE(review): no visible null-check of malloc here — presumably on an
 * elided line; confirm against the full source. */
404 template <typename Type>
405 inline Type *copy (void)
407 assert (!this->ran_out_of_room);
408 unsigned int len = this->head - this->start;
409 void *p = malloc (len);
411 memcpy (p, this->start, len);
412 return reinterpret_cast<Type *> (p);
/* Bump-allocate `size` zeroed bytes; nullptr (and latched failure) when
 * the buffer is exhausted. */
415 template <typename Type>
416 inline Type *allocate_size (unsigned int size)
418 if (unlikely (this->ran_out_of_room || this->end - this->head < ptrdiff_t (size))) {
419 this->ran_out_of_room = true;
422 memset (this->head, 0, size);
423 char *ret = this->head;
425 return reinterpret_cast<Type *> (ret);
428 template <typename Type>
429 inline Type *allocate_min (void)
431 return this->allocate_size<Type> (Type::min_size);
/* Current head as a Type*, without allocating. */
434 template <typename Type>
435 inline Type *start_embed (void)
437 Type *ret = reinterpret_cast<Type *> (this->head);
/* Allocate obj.get_size() bytes and byte-copy obj into them. */
441 template <typename Type>
442 inline Type *embed (const Type &obj)
444 unsigned int size = obj.get_size ();
445 Type *ret = this->allocate_size<Type> (size);
446 if (unlikely (!ret)) return nullptr;
447 memcpy (ret, obj, size);
/* Grow the buffer so obj (already at/near head) covers its min_size. */
451 template <typename Type>
452 inline Type *extend_min (Type &obj)
454 unsigned int size = obj.min_size;
455 assert (this->start <= (char *) &obj && (char *) &obj <= this->head && (char *) &obj + size >= this->head);
456 if (unlikely (!this->allocate_size<Type> (((char *) &obj) + size - this->head))) return nullptr;
457 return reinterpret_cast<Type *> (&obj);
/* Grow the buffer so obj covers its full (dynamic) get_size(). */
460 template <typename Type>
461 inline Type *extend (Type &obj)
463 unsigned int size = obj.get_size ();
464 assert (this->start < (char *) &obj && (char *) &obj <= this->head && (char *) &obj + size >= this->head);
465 if (unlikely (!this->allocate_size<Type> (((char *) &obj) + size - this->head))) return nullptr;
466 return reinterpret_cast<Type *> (&obj);
/* Roll head back to an earlier position (discard trailing output). */
469 inline void truncate (void *new_head)
471 assert (this->start < new_head && new_head <= this->head);
472 this->head = (char *) new_head;
475 unsigned int debug_depth;
476 char *start, *end, *head;
477 bool ran_out_of_room;
/* Supplier<Type>: strided read-only cursor over a caller-owned array, used
 * to feed items into serialize().  operator[] is bounds-checked (returns a
 * default-constructed Type past the end); += advances the cursor.
 * NOTE(review): the `struct Supplier` declaration line and body braces
 * appear elided from this excerpt; text preserved verbatim. */
480 template <typename Type>
483 inline Supplier (const Type *array, unsigned int len_, unsigned int stride_=sizeof(Type))
489 inline const Type operator [] (unsigned int i) const
491 if (unlikely (i >= len)) return Type ();
492 return * (const Type *) (const void *) ((const char *) head + stride * i);
495 inline Supplier<Type> & operator += (unsigned int count)
497 if (unlikely (count > len))
500 head = (const Type *) (const void *) ((const char *) head + stride * count);
505 inline Supplier (const Supplier<Type> &); /* Disallow copy */
506 inline Supplier<Type>& operator= (const Supplier<Type> &); /* Disallow copy */
516 * The OpenType Font File: Data Types
520 /* "The following data types are used in the OpenType font file.
521 * All OpenType fonts use Motorola-style byte ordering (Big Endian):" */
/* BEInt<Type, Bytes>: big-endian integer stored as Bytes raw uint8_t's, so
 * it has no alignment requirement.  set() splits the value MSB-first into
 * v[]; operator Type reassembles it.  NOTE(review): several body lines
 * (reads in the conversion operators, some stores) appear elided from this
 * excerpt; text preserved verbatim. */
528 template <typename Type, int Bytes> struct BEInt;
530 template <typename Type>
531 struct BEInt<Type, 1>
534 inline void set (Type V)
538 inline operator Type (void) const
544 template <typename Type>
545 struct BEInt<Type, 2>
548 inline void set (Type V)
550 v[0] = (V >> 8) & 0xFF;
553 inline operator Type (void) const
558 private: uint8_t v[2];
560 template <typename Type>
561 struct BEInt<Type, 3>
564 inline void set (Type V)
566 v[0] = (V >> 16) & 0xFF;
567 v[1] = (V >> 8) & 0xFF;
570 inline operator Type (void) const
576 private: uint8_t v[3];
578 template <typename Type>
579 struct BEInt<Type, 4>
582 inline void set (Type V)
584 v[0] = (V >> 24) & 0xFF;
585 v[1] = (V >> 16) & 0xFF;
586 v[2] = (V >> 8) & 0xFF;
589 inline operator Type (void) const
596 private: uint8_t v[4];
599 /* Integer types in big-endian order and no alignment requirement */
/* IntType<Type, Size> wraps a BEInt and is the base of all OpenType wire
 * integers below.  NOTE(review): struct declaration/brace lines and the
 * `BEInt v` member appear elided from this excerpt; text kept verbatim. */
600 template <typename Type, unsigned int Size>
603 inline void set (Type i) { v.set (i); }
604 inline operator Type(void) const { return v; }
605 inline bool operator == (const IntType<Type,Size> &o) const { return (Type) v == (Type) o.v; }
606 inline bool operator != (const IntType<Type,Size> &o) const { return !(*this == o); }
/* qsort/bsearch-compatible comparator; note the argument swap so ordering
 * matches cmp(*a) conventions. */
607 static inline int cmp (const IntType<Type,Size> *a, const IntType<Type,Size> *b) { return b->cmp (*a); }
608 template <typename Type2>
609 inline int cmp (Type2 a) const
/* Subtraction cannot overflow when both operands are narrower than int. */
612 if (sizeof (Type) < sizeof (int) && sizeof (Type2) < sizeof (int))
613 return (int) a - (int) b;
615 return a < b ? -1 : a == b ? 0 : +1;
617 inline bool sanitize (hb_sanitize_context_t *c) const
619 TRACE_SANITIZE (this);
620 return_trace (likely (c->check_struct (this)));
625 DEFINE_SIZE_STATIC (Size);
628 typedef IntType<uint8_t, 1> HBUINT8; /* 8-bit unsigned integer. */
629 typedef IntType<int8_t, 1> HBINT8; /* 8-bit signed integer. */
630 typedef IntType<uint16_t, 2> HBUINT16; /* 16-bit unsigned integer. */
631 typedef IntType<int16_t, 2> HBINT16; /* 16-bit signed integer. */
632 typedef IntType<uint32_t, 4> HBUINT32; /* 32-bit unsigned integer. */
633 typedef IntType<int32_t, 4> HBINT32; /* 32-bit signed integer. */
/* Note: 24-bit type is backed by a uint32_t but serialized in 3 bytes. */
634 typedef IntType<uint32_t, 3> HBUINT24; /* 24-bit unsigned integer. */
636 /* 16-bit signed integer (HBINT16) that describes a quantity in FUnits. */
637 typedef HBINT16 FWORD;
639 /* 16-bit unsigned integer (HBUINT16) that describes a quantity in FUnits. */
640 typedef HBUINT16 UFWORD;
642 /* 16-bit signed fixed number with the low 14 bits of fraction (2.14). */
/* Conversion scale 16384 = 2^14.  NOTE(review): struct braces appear
 * elided from this excerpt; text preserved verbatim. */
643 struct F2DOT14 : HBINT16
646 inline float to_float (void) const { return ((int32_t) v) / 16384.f; }
647 inline void set_float (float f) { v.set (round (f * 16384.f)); }
649 DEFINE_SIZE_STATIC (2);
652 /* 32-bit signed fixed-point number (16.16). */
/* Conversion scale 65536 = 2^16. */
653 struct Fixed : HBINT32
656 inline float to_float (void) const { return ((int32_t) v) / 65536.f; }
657 inline void set_float (float f) { v.set (round (f * 65536.f)); }
659 DEFINE_SIZE_STATIC (4);
662 /* Date represented in number of seconds since 12:00 midnight, January 1,
663 * 1904. The value is represented as a signed 64-bit integer. */
/* NOTE(review): the LONGDATETIME struct declaration line and its two
 * HBINT32/HBUINT32 members appear elided from this excerpt. */
666 inline bool sanitize (hb_sanitize_context_t *c) const
668 TRACE_SANITIZE (this);
669 return_trace (likely (c->check_struct (this)));
675 DEFINE_SIZE_STATIC (8);
678 /* Array of four uint8s (length = 32 bits) used to identify a script, language
679 * system, feature, or baseline */
680 struct Tag : HBUINT32
682 /* What the char* converters return is NOT nul-terminated. Print using "%.4s" */
683 inline operator const char* (void) const { return reinterpret_cast<const char *> (&this->v); }
684 inline operator char* (void) { return reinterpret_cast<char *> (&this->v); }
686 DEFINE_SIZE_STATIC (4);
688 DEFINE_NULL_DATA (OT, Tag, " ");
690 /* Glyph index number, same as uint16 (length = 16 bits) */
691 typedef HBUINT16 GlyphID;
693 /* Name-table index, same as uint16 (length = 16 bits) */
694 typedef HBUINT16 NameID;
696 /* Script/language-system/feature index */
697 struct Index : HBUINT16 {
698 static const unsigned int NOT_FOUND_INDEX = 0xFFFFu;
/* Null Index is all-ones, i.e. NOT_FOUND_INDEX. */
700 DEFINE_NULL_DATA (OT, Index, "\xff\xff");
702 /* Offset, Null offset = 0 */
/* Offset<Type>: an unsigned offset whose zero value means "absent".
 * NOTE(review): the `struct Offset : Type` declaration line and several
 * body lines appear elided from this excerpt; text preserved verbatim. */
703 template <typename Type>
706 inline bool is_null (void) const { return 0 == *this; }
/* Record the distance from `base` to the current serialization head. */
708 inline void *serialize (hb_serialize_context_t *c, const void *base)
710 void *t = c->start_embed<void> ();
711 this->set ((char *) t - (char *) base); /* TODO(serialize) Overflow? */
716 DEFINE_SIZE_STATIC (sizeof(Type));
719 typedef Offset<HBUINT16> Offset16;
720 typedef Offset<HBUINT32> Offset32;
724 struct CheckSum : HBUINT32
726 /* This is reference implementation from the spec. */
/* Sum of the table viewed as big-endian uint32's; Length must be a
 * multiple of 4 (asserted).  NOTE(review): the accumulation loop body and
 * return appear elided from this excerpt. */
727 static inline uint32_t CalcTableChecksum (const HBUINT32 *Table, uint32_t Length)
730 assert (0 == (Length & 3));
731 const HBUINT32 *EndPtr = Table + Length / HBUINT32::static_size;
733 while (Table < EndPtr)
738 /* Note: data should be 4byte aligned and have 4byte padding at the end. */
739 inline void set_for_data (const void *data, unsigned int length)
740 { set (CalcTableChecksum ((const HBUINT32 *) data, length)); }
743 DEFINE_SIZE_STATIC (4);
/* Version number: a major.minor pair of FixedType fields, packed into one
 * integer by to_int() for easy comparison.  NOTE(review): the struct
 * declaration line and the major/minor member declarations appear elided
 * from this excerpt; text preserved verbatim. */
751 template <typename FixedType=HBUINT16>
754 inline uint32_t to_int (void) const { return (major << (sizeof(FixedType) * 8)) + minor; }
756 inline bool sanitize (hb_sanitize_context_t *c) const
758 TRACE_SANITIZE (this);
759 return_trace (c->check_struct (this));
765 DEFINE_SIZE_STATIC (2 * sizeof(FixedType));
771 * Template subclasses of Offset that do the dereferencing.
/* OffsetTo<Type>: an offset that dereferences to a Type relative to a
 * caller-supplied base.  Null offset yields the shared Null/Crap objects.
 * NOTE(review): braces/returns appear elided from this excerpt; text
 * preserved verbatim. */
775 template <typename Type, typename OffsetType=HBUINT16>
776 struct OffsetTo : Offset<OffsetType>
778 inline const Type& operator () (const void *base) const
780 unsigned int offset = *this;
781 if (unlikely (!offset)) return Null(Type);
782 return StructAtOffset<const Type> (base, offset);
784 inline Type& operator () (void *base) const
786 unsigned int offset = *this;
787 if (unlikely (!offset)) return Crap(Type);
788 return StructAtOffset<Type> (base, offset);
/* Serialize the pointee and store its offset from base. */
791 inline Type& serialize (hb_serialize_context_t *c, const void *base)
793 return * (Type *) Offset<OffsetType>::serialize (c, base);
/* Validate the offset, then recurse into the pointee; if the pointee is
 * broken, neuter (zero) the offset instead of failing outright. */
796 inline bool sanitize (hb_sanitize_context_t *c, const void *base) const
798 TRACE_SANITIZE (this);
799 if (unlikely (!c->check_struct (this))) return_trace (false);
800 unsigned int offset = *this;
801 if (unlikely (!offset)) return_trace (true);
802 if (unlikely (!c->check_range (base, offset))) return_trace (false);
803 const Type &obj = StructAtOffset<Type> (base, offset);
804 return_trace (likely (obj.sanitize (c)) || neuter (c));
806 template <typename T>
807 inline bool sanitize (hb_sanitize_context_t *c, const void *base, T user_data) const
809 TRACE_SANITIZE (this);
810 if (unlikely (!c->check_struct (this))) return_trace (false);
811 unsigned int offset = *this;
812 if (unlikely (!offset)) return_trace (true);
813 if (unlikely (!c->check_range (base, offset))) return_trace (false);
814 const Type &obj = StructAtOffset<Type> (base, offset);
815 return_trace (likely (obj.sanitize (c, user_data)) || neuter (c));
818 /* Set the offset to Null */
819 inline bool neuter (hb_sanitize_context_t *c) const {
820 return c->try_set (this, 0);
822 DEFINE_SIZE_STATIC (sizeof(OffsetType));
824 template <typename Type> struct LOffsetTo : OffsetTo<Type, HBUINT32> {};
/* base + offset sugar: `this+memberOffset` dereferences an OffsetTo. */
825 template <typename Base, typename OffsetType, typename Type>
826 static inline const Type& operator + (const Base &base, const OffsetTo<Type, OffsetType> &offset) { return offset (base); }
827 template <typename Base, typename OffsetType, typename Type>
828 static inline Type& operator + (Base &base, OffsetTo<Type, OffsetType> &offset) { return offset (base); }
836 /* TODO Use it in ArrayOf, HeadlessArrayOf, and other places around the code base?? */
/* UnsizedArrayOf<Type>: a bare trailing array with NO stored length; the
 * element count always comes from the caller.  operator[] is therefore
 * unchecked.  NOTE(review): braces/returns appear elided from this
 * excerpt; text preserved verbatim. */
837 template <typename Type>
838 struct UnsizedArrayOf
840 inline const Type& operator [] (unsigned int i) const { return arrayZ[i]; }
841 inline Type& operator [] (unsigned int i) { return arrayZ[i]; }
843 inline bool sanitize (hb_sanitize_context_t *c, unsigned int count) const
845 TRACE_SANITIZE (this);
846 if (unlikely (!sanitize_shallow (c, count))) return_trace (false);
848 /* Note: for structs that do not reference other structs,
849 * we do not need to call their sanitize() as we already did
850 * a bound check on the aggregate array size. We just include
851 * a small unreachable expression to make sure the structs
852 * pointed to do have a simple sanitize(), ie. they do not
853 * reference other structs via offsets.
855 (void) (false && arrayZ[0].sanitize (c));
/* Deep variants: recurse into each element with the given base. */
859 inline bool sanitize (hb_sanitize_context_t *c, unsigned int count, const void *base) const
861 TRACE_SANITIZE (this);
862 if (unlikely (!sanitize_shallow (c, count))) return_trace (false);
863 for (unsigned int i = 0; i < count; i++)
864 if (unlikely (!arrayZ[i].sanitize (c, base)))
865 return_trace (false);
868 template <typename T>
869 inline bool sanitize (hb_sanitize_context_t *c, unsigned int count, const void *base, T user_data) const
871 TRACE_SANITIZE (this);
872 if (unlikely (!sanitize_shallow (c, count))) return_trace (false);
873 for (unsigned int i = 0; i < count; i++)
874 if (unlikely (!arrayZ[i].sanitize (c, base, user_data)))
875 return_trace (false);
879 inline bool sanitize_shallow (hb_sanitize_context_t *c, unsigned int count) const
881 TRACE_SANITIZE (this);
882 return_trace (c->check_array (arrayZ, arrayZ[0].static_size, count));
888 DEFINE_SIZE_ARRAY (0, arrayZ);
891 /* Unsized array of offset's */
892 template <typename Type, typename OffsetType>
893 struct UnsizedOffsetArrayOf : UnsizedArrayOf<OffsetTo<Type, OffsetType> > {};
895 /* Unsized array of offsets relative to the beginning of the array itself. */
896 template <typename Type, typename OffsetType>
897 struct UnsizedOffsetListOf : UnsizedOffsetArrayOf<Type, OffsetType>
/* this+offset: offsets here are relative to the array start itself. */
899 inline const Type& operator [] (unsigned int i) const
901 return this+this->arrayZ[i];
904 inline bool sanitize (hb_sanitize_context_t *c, unsigned int count) const
906 TRACE_SANITIZE (this);
907 return_trace ((UnsizedOffsetArrayOf<Type, OffsetType>::sanitize (c, count, this)));
909 template <typename T>
910 inline bool sanitize (hb_sanitize_context_t *c, unsigned int count, T user_data) const
912 TRACE_SANITIZE (this);
913 return_trace ((UnsizedOffsetArrayOf<Type, OffsetType>::sanitize (c, count, this, user_data)));
918 /* An array with a number of elements. */
/* ArrayOf<Type, LenType>: a LenType count followed by that many Type
 * elements.  operator[] is bounds-checked (Null/Crap fallback objects).
 * NOTE(review): braces/returns appear elided from this excerpt; text
 * preserved verbatim. */
919 template <typename Type, typename LenType=HBUINT16>
/* Clamp [start_offset, start_offset + *pcount) to the array; returns a
 * pointer into the array and updates *pcount with the usable count. */
922 const Type *sub_array (unsigned int start_offset, unsigned int *pcount /* IN/OUT */) const
924 unsigned int count = len;
925 if (unlikely (start_offset > count))
928 count -= start_offset;
929 count = MIN (count, *pcount);
931 return arrayZ + start_offset;
934 inline const Type& operator [] (unsigned int i) const
936 if (unlikely (i >= len)) return Null(Type);
939 inline Type& operator [] (unsigned int i)
941 if (unlikely (i >= len)) return Crap(Type);
944 inline unsigned int get_size (void) const
945 { return len.static_size + len * Type::static_size; }
/* Serialize just the header+space for items_len elements. */
947 inline bool serialize (hb_serialize_context_t *c,
948 unsigned int items_len)
950 TRACE_SERIALIZE (this);
951 if (unlikely (!c->extend_min (*this))) return_trace (false);
952 len.set (items_len); /* TODO(serialize) Overflow? */
953 if (unlikely (!c->extend (*this))) return_trace (false);
/* Serialize and fill from a Supplier. */
957 inline bool serialize (hb_serialize_context_t *c,
958 Supplier<Type> &items,
959 unsigned int items_len)
961 TRACE_SERIALIZE (this);
962 if (unlikely (!serialize (c, items_len))) return_trace (false);
963 for (unsigned int i = 0; i < items_len; i++)
964 arrayZ[i] = items[i];
969 inline bool sanitize (hb_sanitize_context_t *c) const
971 TRACE_SANITIZE (this);
972 if (unlikely (!sanitize_shallow (c))) return_trace (false);
974 /* Note: for structs that do not reference other structs,
975 * we do not need to call their sanitize() as we already did
976 * a bound check on the aggregate array size. We just include
977 * a small unreachable expression to make sure the structs
978 * pointed to do have a simple sanitize(), ie. they do not
979 * reference other structs via offsets.
981 (void) (false && arrayZ[0].sanitize (c));
/* Deep variants: recurse into every element. */
985 inline bool sanitize (hb_sanitize_context_t *c, const void *base) const
987 TRACE_SANITIZE (this);
988 if (unlikely (!sanitize_shallow (c))) return_trace (false);
989 unsigned int count = len;
990 for (unsigned int i = 0; i < count; i++)
991 if (unlikely (!arrayZ[i].sanitize (c, base)))
992 return_trace (false);
995 template <typename T>
996 inline bool sanitize (hb_sanitize_context_t *c, const void *base, T user_data) const
998 TRACE_SANITIZE (this);
999 if (unlikely (!sanitize_shallow (c))) return_trace (false);
1000 unsigned int count = len;
1001 for (unsigned int i = 0; i < count; i++)
1002 if (unlikely (!arrayZ[i].sanitize (c, base, user_data)))
1003 return_trace (false);
1004 return_trace (true);
/* Linear search; relies on Type::cmp returning 0 on match. */
1007 template <typename SearchType>
1008 inline int lsearch (const SearchType &x) const
1010 unsigned int count = len;
1011 for (unsigned int i = 0; i < count; i++)
1012 if (!this->arrayZ[i].cmp (x))
/* In-place sort via libc qsort and the element's static comparator. */
1017 inline void qsort (void)
1019 ::qsort (arrayZ, len, sizeof (Type), Type::cmp);
1023 inline bool sanitize_shallow (hb_sanitize_context_t *c) const
1025 TRACE_SANITIZE (this);
1026 return_trace (len.sanitize (c) && c->check_array (arrayZ, Type::static_size, len));
1033 DEFINE_SIZE_ARRAY (sizeof (LenType), arrayZ);
1035 template <typename Type> struct LArrayOf : ArrayOf<Type, HBUINT32> {};
1037 /* Array of Offset's */
1038 template <typename Type, typename OffsetType=HBUINT16>
1039 struct OffsetArrayOf : ArrayOf<OffsetTo<Type, OffsetType> > {};
1041 /* Array of offsets relative to the beginning of the array itself. */
/* operator[] dereferences through this+offset, i.e. offsets are relative
 * to the list start.  NOTE(review): braces appear elided from this
 * excerpt, and the second operator[] (non-const position) also shows a
 * const return type — verify against the full source. */
1042 template <typename Type>
1043 struct OffsetListOf : OffsetArrayOf<Type>
1045 inline const Type& operator [] (unsigned int i) const
1047 if (unlikely (i >= this->len)) return Null(Type);
1048 return this+this->arrayZ[i];
1050 inline const Type& operator [] (unsigned int i)
1052 if (unlikely (i >= this->len)) return Crap(Type);
1053 return this+this->arrayZ[i];
1056 inline bool sanitize (hb_sanitize_context_t *c) const
1058 TRACE_SANITIZE (this);
1059 return_trace (OffsetArrayOf<Type>::sanitize (c, this));
1061 template <typename T>
1062 inline bool sanitize (hb_sanitize_context_t *c, T user_data) const
1064 TRACE_SANITIZE (this);
1065 return_trace (OffsetArrayOf<Type>::sanitize (c, this, user_data));
1070 /* An array starting at second element. */
/* HeadlessArrayOf: the count includes a first element that is NOT stored
 * in arrayZ (it lives elsewhere, e.g. inline in the parent struct), so
 * only len-1 elements are stored and index 0 is rejected.
 * NOTE(review): braces/returns appear elided from this excerpt; text
 * preserved verbatim. */
1071 template <typename Type, typename LenType=HBUINT16>
1072 struct HeadlessArrayOf
1074 inline const Type& operator [] (unsigned int i) const
1076 if (unlikely (i >= len || !i)) return Null(Type);
1079 inline Type& operator [] (unsigned int i)
1081 if (unlikely (i >= len || !i)) return Crap(Type);
1084 inline unsigned int get_size (void) const
1085 { return len.static_size + (len ? len - 1 : 0) * Type::static_size; }
/* Serialize: stores items_len as count but copies only items_len-1
 * elements (the "head" element is external); advances the supplier. */
1087 inline bool serialize (hb_serialize_context_t *c,
1088 Supplier<Type> &items,
1089 unsigned int items_len)
1091 TRACE_SERIALIZE (this);
1092 if (unlikely (!c->extend_min (*this))) return_trace (false);
1093 len.set (items_len); /* TODO(serialize) Overflow? */
1094 if (unlikely (!items_len)) return_trace (true);
1095 if (unlikely (!c->extend (*this))) return_trace (false);
1096 for (unsigned int i = 0; i < items_len - 1; i++)
1097 arrayZ[i] = items[i];
1098 items += items_len - 1;
1099 return_trace (true);
1102 inline bool sanitize (hb_sanitize_context_t *c) const
1104 TRACE_SANITIZE (this);
1105 if (unlikely (!sanitize_shallow (c))) return_trace (false);
1107 /* Note: for structs that do not reference other structs,
1108 * we do not need to call their sanitize() as we already did
1109 * a bound check on the aggregate array size. We just include
1110 * a small unreachable expression to make sure the structs
1111 * pointed to do have a simple sanitize(), ie. they do not
1112 * reference other structs via offsets.
1114 (void) (false && arrayZ[0].sanitize (c));
1116 return_trace (true);
1120 inline bool sanitize_shallow (hb_sanitize_context_t *c) const
1122 TRACE_SANITIZE (this);
1123 return_trace (len.sanitize (c) &&
1124 (!len || c->check_array (arrayZ, Type::static_size, len - 1)));
1131 DEFINE_SIZE_ARRAY (sizeof (LenType), arrayZ);
1136 * An array with sorted elements. Supports binary searching.
/* bsearch returns the matching index (details on elided lines — the loop
 * body, match/miss returns are missing from this excerpt; text preserved
 * verbatim).  Elements must already be sorted per Type::cmp. */
1138 template <typename Type, typename LenType=HBUINT16>
1139 struct SortedArrayOf : ArrayOf<Type, LenType>
1141 template <typename SearchType>
1142 inline int bsearch (const SearchType &x) const
1144 /* Hand-coded bsearch here since this is in the hot inner loop. */
1145 const Type *arr = this->arrayZ;
1146 int min = 0, max = (int) this->len - 1;
1149 int mid = (min + max) / 2;
1150 int c = arr[mid].cmp (x);
1163 * Binary-search arrays
/* Header for OpenType binary-searchable tables: unitSize(?)/searchRange/
 * entrySelector/rangeShift, derived from the element count v by set().
 * NOTE(review): braces and the `len` member declaration appear elided
 * from this excerpt; text preserved verbatim. */
1166 struct BinSearchHeader
1168 inline operator uint32_t (void) const { return len; }
1170 inline bool sanitize (hb_sanitize_context_t *c) const
1172 TRACE_SANITIZE (this);
1173 return_trace (c->check_struct (this));
1176 inline void set (unsigned int v)
/* entrySelector = floor(log2(v)); searchRange = 16 * 2^entrySelector;
 * the hard-coded 16 suggests a fixed unit size — confirm in full source. */
1180 entrySelector.set (MAX (1u, _hb_bit_storage (v)) - 1);
1181 searchRange.set (16 * (1u << entrySelector));
1182 rangeShift.set (v * 16 > searchRange
1183 ? 16 * v - searchRange
1189 HBUINT16 searchRange;
1190 HBUINT16 entrySelector;
1191 HBUINT16 rangeShift;
1194 DEFINE_SIZE_STATIC (8);
1197 template <typename Type>
1198 struct BinSearchArrayOf : SortedArrayOf<Type, BinSearchHeader> {};
1201 /* Lazy struct and blob loaders. */
1203 /* Logic is shared between hb_lazy_loader_t and hb_table_lazy_loader_t */
/* Thread-safe lazy singleton of T: first get() allocates and initializes
 * the instance; a lost compare-exchange race frees the local copy and
 * reloads the winner.  NOTE(review): braces/returns and some statements
 * appear elided from this excerpt; text preserved verbatim. */
1204 template <typename T>
1205 struct hb_lazy_loader_t
1207 inline void init (hb_face_t *face_)
1213 inline void fini (void)
/* Only free instances we actually allocated — never the shared Null. */
1215 if (instance && instance != &Null(T))
1222 inline const T* get (void) const
1225 T *p = (T *) hb_atomic_ptr_get (&instance);
1228 p = (T *) calloc (1, sizeof (T));
/* Allocation failure falls back to the shared immutable Null object. */
1230 p = const_cast<T *> (&Null(T));
1233 if (unlikely (!hb_atomic_ptr_cmpexch (const_cast<T **>(&instance), nullptr, p)))
1243 inline const T* operator-> (void) const
1253 /* Logic is shared between hb_lazy_loader_t and hb_table_lazy_loader_t */
/* Same pattern for a whole font table: lazily reference + sanitize the
 * table blob for T::tableTag, publish it with a compare-exchange, destroy
 * the loser on a race. */
1254 template <typename T>
1255 struct hb_table_lazy_loader_t
1257 inline void init (hb_face_t *face_)
1263 inline void fini (void)
1265 hb_blob_destroy (blob);
1268 inline const T* get (void) const
1271 hb_blob_t *blob_ = (hb_blob_t *) hb_atomic_ptr_get (&blob);
1272 if (unlikely (!blob_))
1274 blob_ = OT::Sanitizer<T>().sanitize (face->reference_table (T::tableTag));
1275 if (!hb_atomic_ptr_cmpexch (&blob, nullptr, blob_))
1277 hb_blob_destroy (blob_);
1282 return blob_->as<T> ();
1285 inline const T* operator-> (void) const
/* mutable: get() caches into it from const context. */
1292 mutable hb_blob_t *blob;
1296 } /* namespace OT */
1299 #endif /* HB_OPEN_TYPE_PRIVATE_HH */