2 * Copyright © 2007,2008,2009,2010 Red Hat, Inc.
3 * Copyright © 2012 Google, Inc.
5 * This is part of HarfBuzz, a text shaping library.
7 * Permission is hereby granted, without written agreement and without
8 * license or royalty fees, to use, copy, modify, and distribute this
9 * software and its documentation for any purpose, provided that the
10 * above copyright notice and the following two paragraphs appear in
11 * all copies of this software.
13 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
14 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
15 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
16 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
19 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
20 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
21 * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
22 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
23 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
25 * Red Hat Author(s): Behdad Esfahbod
26 * Google Author(s): Behdad Esfahbod
29 #ifndef HB_OPEN_TYPE_PRIVATE_HH
30 #define HB_OPEN_TYPE_PRIVATE_HH
32 #include "hb-private.hh"
/* Cast helpers: reinterpret an object of any type as a `Type`.
 *
 * CastR: reference-to-reference cast (const and non-const overloads).
 * CastP: pointer-to-pointer cast (const and non-const overloads).
 * No copies are made; these are thin wrappers over reinterpret_cast. */
template <typename Type, typename TObject>
static inline const Type &
CastR (const TObject &obj)
{
  return reinterpret_cast<const Type &> (obj);
}
template <typename Type, typename TObject>
static inline Type &
CastR (TObject &obj)
{
  return reinterpret_cast<Type &> (obj);
}

template <typename Type, typename TObject>
static inline const Type *
CastP (const TObject *obj)
{
  return reinterpret_cast<const Type *> (obj);
}
template <typename Type, typename TObject>
static inline Type *
CastP (TObject *obj)
{
  return reinterpret_cast<Type *> (obj);
}
/* StructAtOffset<T>(P, Ofs): reference to the T located `Ofs` bytes past
 * the address P.  Const and non-const overloads; byte arithmetic is done
 * through char* so the offset is in bytes regardless of T. */
template <typename Type>
static inline const Type &
StructAtOffset (const void *base, unsigned int offset)
{
  const char *bytes = reinterpret_cast<const char *> (base);
  return *reinterpret_cast<const Type *> (bytes + offset);
}
template <typename Type>
static inline Type &
StructAtOffset (void *base, unsigned int offset)
{
  char *bytes = reinterpret_cast<char *> (base);
  return *reinterpret_cast<Type *> (bytes + offset);
}
68 /* StructAfter<T>(X) returns the struct T& that is placed after X.
69 * Works with X of variable size also. X must implement get_size() */
70 template<typename Type, typename TObject>
71 static inline const Type& StructAfter(const TObject &X)
72 { return StructAtOffset<Type>(&X, X.get_size()); }
73 template<typename Type, typename TObject>
74 static inline Type& StructAfter(TObject &X)
75 { return StructAtOffset<Type>(&X, X.get_size()); }
/* Check _assertion in a method environment.  The _line indirection through
 * two helper macros makes __LINE__ expand before token pasting, giving each
 * generated method a unique name. */
#define _DEFINE_INSTANCE_ASSERTION1(_line, _assertion) \
  inline void _instance_assertion_on_line_##_line (void) const \
  { \
    ASSERT_STATIC (_assertion); \
    ASSERT_INSTANCE_POD (*this); /* Make sure it's POD. */ \
  }
# define _DEFINE_INSTANCE_ASSERTION0(_line, _assertion) _DEFINE_INSTANCE_ASSERTION1 (_line, _assertion)
# define DEFINE_INSTANCE_ASSERTION(_assertion) _DEFINE_INSTANCE_ASSERTION0 (__LINE__, _assertion)

/* Check that _code compiles in a method environment */
#define _DEFINE_COMPILES_ASSERTION1(_line, _code) \
  inline void _compiles_assertion_on_line_##_line (void) const \
  { _code; }
# define _DEFINE_COMPILES_ASSERTION0(_line, _code) _DEFINE_COMPILES_ASSERTION1 (_line, _code)
# define DEFINE_COMPILES_ASSERTION(_code) _DEFINE_COMPILES_ASSERTION0 (__LINE__, _code)
/* Size-declaration macros used inside every OpenType struct.  Each one
 * declares min_size (the smallest valid on-disk size) and an instance
 * assertion tying the declared size to sizeof the struct. */

/* Fixed-size struct: also provides static_size and get_size (). */
#define DEFINE_SIZE_STATIC(size) \
  DEFINE_INSTANCE_ASSERTION (sizeof (*this) == (size)); \
  static const unsigned int static_size = (size); \
  static const unsigned int min_size = (size); \
  inline unsigned int get_size (void) const { return (size); }

/* Union whose named _member determines the layout size. */
#define DEFINE_SIZE_UNION(size, _member) \
  DEFINE_INSTANCE_ASSERTION (this->u._member.static_size == (size)); \
  static const unsigned int min_size = (size)

/* Variable-size struct with a known lower bound. */
#define DEFINE_SIZE_MIN(size) \
  DEFINE_INSTANCE_ASSERTION (sizeof (*this) >= (size)); \
  static const unsigned int min_size = (size)

/* Struct ending in one variable-length array. */
#define DEFINE_SIZE_ARRAY(size, array) \
  DEFINE_INSTANCE_ASSERTION (sizeof (*this) == (size) + sizeof (array[0])); \
  DEFINE_COMPILES_ASSERTION ((void) array[0].static_size) \
  static const unsigned int min_size = (size)

/* Struct ending in two variable-length arrays. */
#define DEFINE_SIZE_ARRAY2(size, array1, array2) \
  DEFINE_INSTANCE_ASSERTION (sizeof (*this) == (size) + sizeof (this->array1[0]) + sizeof (this->array2[0])); \
  DEFINE_COMPILES_ASSERTION ((void) array1[0].static_size; (void) array2[0].static_size) \
  static const unsigned int min_size = (size)
131 /* Global nul-content Null pool. Enlarge as necessary. */
132 /* TODO This really should be a extern HB_INTERNAL and defined somewhere... */
133 static const void *_NullPool[(256+8) / sizeof (void *)];
135 /* Generic nul-content Null objects. */
136 template <typename Type>
137 static inline const Type& Null (void) {
138 ASSERT_STATIC (sizeof (Type) <= sizeof (_NullPool));
139 return *CastP<Type> (_NullPool);
142 /* Specializaiton for arbitrary-content arbitrary-sized Null objects. */
143 #define DEFINE_NULL_DATA(Type, data) \
144 static const char _Null##Type[sizeof (Type) + 1] = data; /* +1 is for nul-termination in data */ \
146 /*static*/ inline const Type& Null<Type> (void) { \
147 return *CastP<Type> (_Null##Type); \
148 } /* The following line really exists such that we end in a place needing semicolon */ \
149 ASSERT_STATIC (Type::min_size + 1 <= sizeof (_Null##Type))
151 /* Accessor macro. */
152 #define Null(Type) Null<Type>()
/* CRTP base for dispatch contexts (sanitize, serialize, lookup traversal).
 * `Context` is the deriving class, `Return` its result type, and
 * MaxDebugDepth bounds trace nesting.  Derived classes override
 * may_dispatch()/no_dispatch_return_value() as needed. */
template <typename Context, typename Return, unsigned int MaxDebugDepth>
struct hb_dispatch_context_t
{
  static const unsigned int max_debug_depth = MaxDebugDepth;
  typedef Return return_t;
  /* Default: always willing to dispatch on any object/format pair. */
  template <typename T, typename F>
  inline bool may_dispatch (const T *obj, const F *format) { return true; }
  /* Value to report when dispatch is declined; defers to the derived class. */
  static return_t no_dispatch_return_value (void) { return Context::default_return_value (); }
};
#ifndef HB_DEBUG_SANITIZE
#define HB_DEBUG_SANITIZE (HB_DEBUG+0)
#endif

/* Trace helper for sanitize() methods; expects a local context pointer `c`
 * in scope and declares a scoped `trace` object for return_trace(). */
#define TRACE_SANITIZE(this) \
	hb_auto_trace_t<HB_DEBUG_SANITIZE, bool> trace \
	(&c->debug_depth, c->get_name (), this, HB_FUNC, \
	 "");

/* This limits sanitizing time on really broken fonts. */
#ifndef HB_SANITIZE_MAX_EDITS
#define HB_SANITIZE_MAX_EDITS 32
#endif
189 struct hb_sanitize_context_t :
190 hb_dispatch_context_t<hb_sanitize_context_t, bool, HB_DEBUG_SANITIZE>
192 inline hb_sanitize_context_t (void) :
194 start (NULL), end (NULL),
195 writable (false), edit_count (0),
198 inline const char *get_name (void) { return "SANITIZE"; }
199 template <typename T, typename F>
200 inline bool may_dispatch (const T *obj, const F *format)
201 { return format->sanitize (this); }
202 template <typename T>
203 inline return_t dispatch (const T &obj) { return obj.sanitize (this); }
204 static return_t default_return_value (void) { return true; }
205 static return_t no_dispatch_return_value (void) { return false; }
206 bool stop_sublookup_iteration (const return_t r) const { return !r; }
208 inline void init (hb_blob_t *b)
210 this->blob = hb_blob_reference (b);
211 this->writable = false;
214 inline void start_processing (void)
216 this->start = hb_blob_get_data (this->blob, NULL);
217 this->end = this->start + hb_blob_get_length (this->blob);
218 assert (this->start <= this->end); /* Must not overflow. */
219 this->edit_count = 0;
220 this->debug_depth = 0;
222 DEBUG_MSG_LEVEL (SANITIZE, start, 0, +1,
223 "start [%p..%p] (%lu bytes)",
224 this->start, this->end,
225 (unsigned long) (this->end - this->start));
228 inline void end_processing (void)
230 DEBUG_MSG_LEVEL (SANITIZE, this->start, 0, -1,
231 "end [%p..%p] %u edit requests",
232 this->start, this->end, this->edit_count);
234 hb_blob_destroy (this->blob);
236 this->start = this->end = NULL;
239 inline bool check_range (const void *base, unsigned int len) const
241 const char *p = (const char *) base;
242 bool ok = this->start <= p && p <= this->end && (unsigned int) (this->end - p) >= len;
244 DEBUG_MSG_LEVEL (SANITIZE, p, this->debug_depth+1, 0,
245 "check_range [%p..%p] (%d bytes) in [%p..%p] -> %s",
247 this->start, this->end,
248 ok ? "OK" : "OUT-OF-RANGE");
253 inline bool check_array (const void *base, unsigned int record_size, unsigned int len) const
255 const char *p = (const char *) base;
256 bool overflows = _hb_unsigned_int_mul_overflows (len, record_size);
257 unsigned int array_size = record_size * len;
258 bool ok = !overflows && this->check_range (base, array_size);
260 DEBUG_MSG_LEVEL (SANITIZE, p, this->debug_depth+1, 0,
261 "check_array [%p..%p] (%d*%d=%d bytes) in [%p..%p] -> %s",
262 p, p + (record_size * len), record_size, len, (unsigned int) array_size,
263 this->start, this->end,
264 overflows ? "OVERFLOWS" : ok ? "OK" : "OUT-OF-RANGE");
269 template <typename Type>
270 inline bool check_struct (const Type *obj) const
272 return likely (this->check_range (obj, obj->min_size));
275 inline bool may_edit (const void *base HB_UNUSED, unsigned int len HB_UNUSED)
277 if (this->edit_count >= HB_SANITIZE_MAX_EDITS)
280 const char *p = (const char *) base;
283 DEBUG_MSG_LEVEL (SANITIZE, p, this->debug_depth+1, 0,
284 "may_edit(%u) [%p..%p] (%d bytes) in [%p..%p] -> %s",
287 this->start, this->end,
288 this->writable ? "GRANTED" : "DENIED");
290 return this->writable;
293 template <typename Type, typename ValueType>
294 inline bool try_set (const Type *obj, const ValueType &v) {
295 if (this->may_edit (obj, obj->static_size)) {
296 const_cast<Type *> (obj)->set (v);
302 mutable unsigned int debug_depth;
303 const char *start, *end;
305 unsigned int edit_count;
311 /* Template to sanitize an object. */
312 template <typename Type>
315 static hb_blob_t *sanitize (hb_blob_t *blob) {
316 hb_sanitize_context_t c[1];
319 /* TODO is_sane() stuff */
324 DEBUG_MSG_FUNC (SANITIZE, c->start, "start");
326 c->start_processing ();
328 if (unlikely (!c->start)) {
329 c->end_processing ();
333 Type *t = CastP<Type> (const_cast<char *> (c->start));
335 sane = t->sanitize (c);
338 DEBUG_MSG_FUNC (SANITIZE, c->start, "passed first round with %d edits; going for second round", c->edit_count);
340 /* sanitize again to ensure no toe-stepping */
342 sane = t->sanitize (c);
344 DEBUG_MSG_FUNC (SANITIZE, c->start, "requested %d edits in second round; FAILLING", c->edit_count);
349 unsigned int edit_count = c->edit_count;
350 if (edit_count && !c->writable) {
351 c->start = hb_blob_get_data_writable (blob, NULL);
352 c->end = c->start + hb_blob_get_length (blob);
356 /* ok, we made it writable by relocating. try again */
357 DEBUG_MSG_FUNC (SANITIZE, c->start, "retry");
363 c->end_processing ();
365 DEBUG_MSG_FUNC (SANITIZE, c->start, sane ? "PASSED" : "FAILED");
369 hb_blob_destroy (blob);
370 return hb_blob_get_empty ();
374 static const Type* lock_instance (hb_blob_t *blob) {
375 hb_blob_make_immutable (blob);
376 const char *base = hb_blob_get_data (blob, NULL);
377 return unlikely (!base) ? &Null(Type) : CastP<Type> (base);
#ifndef HB_DEBUG_SERIALIZE
#define HB_DEBUG_SERIALIZE (HB_DEBUG+0)
#endif

/* Trace helper for serialize() methods; expects a local context pointer `c`
 * in scope and declares a scoped `trace` object for return_trace(). */
#define TRACE_SERIALIZE(this) \
	hb_auto_trace_t<HB_DEBUG_SERIALIZE, bool> trace \
	(&c->debug_depth, "SERIALIZE", c, HB_FUNC, \
	 "");
398 struct hb_serialize_context_t
400 inline hb_serialize_context_t (void *start_, unsigned int size)
402 this->start = (char *) start_;
403 this->end = this->start + size;
405 this->ran_out_of_room = false;
406 this->head = this->start;
407 this->debug_depth = 0;
410 template <typename Type>
411 inline Type *start_serialize (void)
413 DEBUG_MSG_LEVEL (SERIALIZE, this->start, 0, +1,
414 "start [%p..%p] (%lu bytes)",
415 this->start, this->end,
416 (unsigned long) (this->end - this->start));
418 return start_embed<Type> ();
421 inline void end_serialize (void)
423 DEBUG_MSG_LEVEL (SERIALIZE, this->start, 0, -1,
424 "end [%p..%p] serialized %d bytes; %s",
425 this->start, this->end,
426 (int) (this->head - this->start),
427 this->ran_out_of_room ? "RAN OUT OF ROOM" : "did not ran out of room");
431 template <typename Type>
432 inline Type *copy (void)
434 assert (!this->ran_out_of_room);
435 unsigned int len = this->head - this->start;
436 void *p = malloc (len);
438 memcpy (p, this->start, len);
439 return reinterpret_cast<Type *> (p);
442 template <typename Type>
443 inline Type *allocate_size (unsigned int size)
445 if (unlikely (this->ran_out_of_room || this->end - this->head < ptrdiff_t (size))) {
446 this->ran_out_of_room = true;
449 memset (this->head, 0, size);
450 char *ret = this->head;
452 return reinterpret_cast<Type *> (ret);
455 template <typename Type>
456 inline Type *allocate_min (void)
458 return this->allocate_size<Type> (Type::min_size);
461 template <typename Type>
462 inline Type *start_embed (void)
464 Type *ret = reinterpret_cast<Type *> (this->head);
468 template <typename Type>
469 inline Type *embed (const Type &obj)
471 unsigned int size = obj.get_size ();
472 Type *ret = this->allocate_size<Type> (size);
473 if (unlikely (!ret)) return NULL;
474 memcpy (ret, obj, size);
478 template <typename Type>
479 inline Type *extend_min (Type &obj)
481 unsigned int size = obj.min_size;
482 assert (this->start <= (char *) &obj && (char *) &obj <= this->head && (char *) &obj + size >= this->head);
483 if (unlikely (!this->allocate_size<Type> (((char *) &obj) + size - this->head))) return NULL;
484 return reinterpret_cast<Type *> (&obj);
487 template <typename Type>
488 inline Type *extend (Type &obj)
490 unsigned int size = obj.get_size ();
491 assert (this->start < (char *) &obj && (char *) &obj <= this->head && (char *) &obj + size >= this->head);
492 if (unlikely (!this->allocate_size<Type> (((char *) &obj) + size - this->head))) return NULL;
493 return reinterpret_cast<Type *> (&obj);
496 inline void truncate (void *new_head)
498 assert (this->start < new_head && new_head <= this->head);
499 this->head = (char *) new_head;
502 unsigned int debug_depth;
503 char *start, *end, *head;
504 bool ran_out_of_room;
507 template <typename Type>
510 inline Supplier (const Type *array, unsigned int len_)
515 inline const Type operator [] (unsigned int i) const
517 if (unlikely (i >= len)) return Type ();
521 inline void advance (unsigned int count)
523 if (unlikely (count > len))
530 inline Supplier (const Supplier<Type> &); /* Disallow copy */
531 inline Supplier<Type>& operator= (const Supplier<Type> &); /* Disallow copy */
542 * The OpenType Font File: Data Types
546 /* "The following data types are used in the OpenType font file.
547 * All OpenType fonts use Motorola-style byte ordering (Big Endian):" */
/* Big-endian integer storage, one specialization per byte width.
 * Each holds raw bytes (most-significant first), so structs built from
 * these have no alignment requirement and match the on-disk layout. */
template <typename Type, int Bytes> struct BEInt;

template <typename Type>
struct BEInt<Type, 1>
{
  public:
  inline void set (Type V)
  {
    v = V;
  }
  inline operator Type (void) const
  {
    return v;
  }
  private: uint8_t v;
};
template <typename Type>
struct BEInt<Type, 2>
{
  public:
  inline void set (Type V)
  {
    v[0] = (V >> 8) & 0xFF;
    v[1] = (V      ) & 0xFF;
  }
  inline operator Type (void) const
  {
    return (v[0] << 8) + v[1];
  }
  private: uint8_t v[2];
};
template <typename Type>
struct BEInt<Type, 3>
{
  public:
  inline void set (Type V)
  {
    v[0] = (V >> 16) & 0xFF;
    v[1] = (V >>  8) & 0xFF;
    v[2] = (V      ) & 0xFF;
  }
  inline operator Type (void) const
  {
    return (v[0] << 16) + (v[1] << 8) + v[2];
  }
  private: uint8_t v[3];
};
template <typename Type>
struct BEInt<Type, 4>
{
  public:
  inline void set (Type V)
  {
    v[0] = (V >> 24) & 0xFF;
    v[1] = (V >> 16) & 0xFF;
    v[2] = (V >>  8) & 0xFF;
    v[3] = (V      ) & 0xFF;
  }
  inline operator Type (void) const
  {
    return (v[0] << 24) + (v[1] << 16) + (v[2] << 8) + v[3];
  }
  private: uint8_t v[4];
};
625 /* Integer types in big-endian order and no alignment requirement */
626 template <typename Type, unsigned int Size>
629 inline void set (Type i) { v.set (i); }
630 inline operator Type(void) const { return v; }
631 inline bool operator == (const IntType<Type,Size> &o) const { return (Type) v == (Type) o.v; }
632 inline bool operator != (const IntType<Type,Size> &o) const { return !(*this == o); }
633 static inline int cmp (const IntType<Type,Size> *a, const IntType<Type,Size> *b) { return b->cmp (*a); }
634 inline int cmp (Type a) const
637 if (sizeof (Type) < sizeof (int))
638 return (int) a - (int) b;
640 return a < b ? -1 : a == b ? 0 : +1;
642 inline bool sanitize (hb_sanitize_context_t *c) const
644 TRACE_SANITIZE (this);
645 return_trace (likely (c->check_struct (this)));
650 DEFINE_SIZE_STATIC (Size);
653 typedef IntType<uint8_t , 1> BYTE; /* 8-bit unsigned integer. */
654 typedef IntType<uint16_t, 2> USHORT; /* 16-bit unsigned integer. */
655 typedef IntType<int16_t, 2> SHORT; /* 16-bit signed integer. */
656 typedef IntType<uint32_t, 4> ULONG; /* 32-bit unsigned integer. */
657 typedef IntType<int32_t, 4> LONG; /* 32-bit signed integer. */
658 typedef IntType<uint32_t, 3> UINT24; /* 24-bit unsigned integer. */
660 /* 16-bit signed integer (SHORT) that describes a quantity in FUnits. */
663 /* 16-bit unsigned integer (USHORT) that describes a quantity in FUnits. */
664 typedef USHORT UFWORD;
666 /* 16-bit signed fixed number with the low 14 bits of fraction (2.14). */
667 struct F2DOT14 : SHORT
669 //inline float to_float (void) const { return ???; }
670 //inline void set_float (float f) { v.set (f * ???); }
672 DEFINE_SIZE_STATIC (2);
675 /* 32-bit signed fixed-point number (16.16). */
678 //inline float to_float (void) const { return ???; }
679 //inline void set_float (float f) { v.set (f * ???); }
681 DEFINE_SIZE_STATIC (4);
684 /* Date represented in number of seconds since 12:00 midnight, January 1,
685 * 1904. The value is represented as a signed 64-bit integer. */
688 inline bool sanitize (hb_sanitize_context_t *c) const
690 TRACE_SANITIZE (this);
691 return_trace (likely (c->check_struct (this)));
697 DEFINE_SIZE_STATIC (8);
700 /* Array of four uint8s (length = 32 bits) used to identify a script, language
701 * system, feature, or baseline */
704 /* What the char* converters return is NOT nul-terminated. Print using "%.4s" */
705 inline operator const char* (void) const { return reinterpret_cast<const char *> (&this->v); }
706 inline operator char* (void) { return reinterpret_cast<char *> (&this->v); }
708 DEFINE_SIZE_STATIC (4);
710 DEFINE_NULL_DATA (Tag, " ");
712 /* Glyph index number, same as uint16 (length = 16 bits) */
713 struct GlyphID : USHORT {
714 static inline int cmp (const GlyphID *a, const GlyphID *b) { return b->USHORT::cmp (*a); }
715 inline int cmp (hb_codepoint_t a) const { return (int) a - (int) *this; }
718 /* Script/language-system/feature index */
719 struct Index : USHORT {
720 static const unsigned int NOT_FOUND_INDEX = 0xFFFFu;
722 DEFINE_NULL_DATA (Index, "\xff\xff");
724 /* Offset, Null offset = 0 */
725 template <typename Type=USHORT>
728 inline bool is_null (void) const { return 0 == *this; }
730 DEFINE_SIZE_STATIC (sizeof(Type));
735 struct CheckSum : ULONG
737 /* This is reference implementation from the spec. */
738 static inline uint32_t CalcTableChecksum (const ULONG *Table, uint32_t Length)
741 const ULONG *EndPtr = Table+((Length+3) & ~3) / ULONG::static_size;
743 while (Table < EndPtr)
748 /* Note: data should be 4byte aligned and have 4byte padding at the end. */
749 inline void set_for_data (const void *data, unsigned int length)
750 { set (CalcTableChecksum ((const ULONG *) data, length)); }
753 DEFINE_SIZE_STATIC (4);
761 template <typename FixedType=USHORT>
764 inline uint32_t to_int (void) const { return (major << (sizeof(FixedType) * 8)) + minor; }
766 inline bool sanitize (hb_sanitize_context_t *c) const
768 TRACE_SANITIZE (this);
769 return_trace (c->check_struct (this));
775 DEFINE_SIZE_STATIC (2 * sizeof(FixedType));
781 * Template subclasses of Offset that do the dereferencing.
785 template <typename Type, typename OffsetType=USHORT>
786 struct OffsetTo : Offset<OffsetType>
788 inline const Type& operator () (const void *base) const
790 unsigned int offset = *this;
791 if (unlikely (!offset)) return Null(Type);
792 return StructAtOffset<Type> (base, offset);
795 inline Type& serialize (hb_serialize_context_t *c, const void *base)
797 Type *t = c->start_embed<Type> ();
798 this->set ((char *) t - (char *) base); /* TODO(serialize) Overflow? */
802 inline bool sanitize (hb_sanitize_context_t *c, const void *base) const
804 TRACE_SANITIZE (this);
805 if (unlikely (!c->check_struct (this))) return_trace (false);
806 unsigned int offset = *this;
807 if (unlikely (!offset)) return_trace (true);
808 const Type &obj = StructAtOffset<Type> (base, offset);
809 return_trace (likely (obj.sanitize (c)) || neuter (c));
811 template <typename T>
812 inline bool sanitize (hb_sanitize_context_t *c, const void *base, T user_data) const
814 TRACE_SANITIZE (this);
815 if (unlikely (!c->check_struct (this))) return_trace (false);
816 unsigned int offset = *this;
817 if (unlikely (!offset)) return_trace (true);
818 const Type &obj = StructAtOffset<Type> (base, offset);
819 return_trace (likely (obj.sanitize (c, user_data)) || neuter (c));
822 /* Set the offset to Null */
823 inline bool neuter (hb_sanitize_context_t *c) const {
824 return c->try_set (this, 0);
826 DEFINE_SIZE_STATIC (sizeof(OffsetType));
828 template <typename Base, typename OffsetType, typename Type>
829 static inline const Type& operator + (const Base &base, const OffsetTo<Type, OffsetType> &offset) { return offset (base); }
830 template <typename Base, typename OffsetType, typename Type>
831 static inline Type& operator + (Base &base, OffsetTo<Type, OffsetType> &offset) { return offset (base); }
838 /* An array with a number of elements. */
839 template <typename Type, typename LenType=USHORT>
842 const Type *sub_array (unsigned int start_offset, unsigned int *pcount /* IN/OUT */) const
844 unsigned int count = len;
845 if (unlikely (start_offset > count))
848 count -= start_offset;
849 count = MIN (count, *pcount);
851 return array + start_offset;
854 inline const Type& operator [] (unsigned int i) const
856 if (unlikely (i >= len)) return Null(Type);
859 inline Type& operator [] (unsigned int i)
863 inline unsigned int get_size (void) const
864 { return len.static_size + len * Type::static_size; }
866 inline bool serialize (hb_serialize_context_t *c,
867 unsigned int items_len)
869 TRACE_SERIALIZE (this);
870 if (unlikely (!c->extend_min (*this))) return_trace (false);
871 len.set (items_len); /* TODO(serialize) Overflow? */
872 if (unlikely (!c->extend (*this))) return_trace (false);
876 inline bool serialize (hb_serialize_context_t *c,
877 Supplier<Type> &items,
878 unsigned int items_len)
880 TRACE_SERIALIZE (this);
881 if (unlikely (!serialize (c, items_len))) return_trace (false);
882 for (unsigned int i = 0; i < items_len; i++)
884 items.advance (items_len);
888 inline bool sanitize (hb_sanitize_context_t *c) const
890 TRACE_SANITIZE (this);
891 if (unlikely (!sanitize_shallow (c))) return_trace (false);
893 /* Note: for structs that do not reference other structs,
894 * we do not need to call their sanitize() as we already did
895 * a bound check on the aggregate array size. We just include
896 * a small unreachable expression to make sure the structs
897 * pointed to do have a simple sanitize(), ie. they do not
898 * reference other structs via offsets.
900 (void) (false && array[0].sanitize (c));
904 inline bool sanitize (hb_sanitize_context_t *c, const void *base) const
906 TRACE_SANITIZE (this);
907 if (unlikely (!sanitize_shallow (c))) return_trace (false);
908 unsigned int count = len;
909 for (unsigned int i = 0; i < count; i++)
910 if (unlikely (!array[i].sanitize (c, base)))
911 return_trace (false);
914 template <typename T>
915 inline bool sanitize (hb_sanitize_context_t *c, const void *base, T user_data) const
917 TRACE_SANITIZE (this);
918 if (unlikely (!sanitize_shallow (c))) return_trace (false);
919 unsigned int count = len;
920 for (unsigned int i = 0; i < count; i++)
921 if (unlikely (!array[i].sanitize (c, base, user_data)))
922 return_trace (false);
926 template <typename SearchType>
927 inline int lsearch (const SearchType &x) const
929 unsigned int count = len;
930 for (unsigned int i = 0; i < count; i++)
931 if (!this->array[i].cmp (x))
937 inline bool sanitize_shallow (hb_sanitize_context_t *c) const
939 TRACE_SANITIZE (this);
940 return_trace (c->check_struct (this) && c->check_array (array, Type::static_size, len));
947 DEFINE_SIZE_ARRAY (sizeof (LenType), array);
950 /* Array of Offset's */
951 template <typename Type>
952 struct OffsetArrayOf : ArrayOf<OffsetTo<Type> > {};
954 /* Array of offsets relative to the beginning of the array itself. */
955 template <typename Type>
956 struct OffsetListOf : OffsetArrayOf<Type>
958 inline const Type& operator [] (unsigned int i) const
960 if (unlikely (i >= this->len)) return Null(Type);
961 return this+this->array[i];
964 inline bool sanitize (hb_sanitize_context_t *c) const
966 TRACE_SANITIZE (this);
967 return_trace (OffsetArrayOf<Type>::sanitize (c, this));
969 template <typename T>
970 inline bool sanitize (hb_sanitize_context_t *c, T user_data) const
972 TRACE_SANITIZE (this);
973 return_trace (OffsetArrayOf<Type>::sanitize (c, this, user_data));
978 /* An array starting at second element. */
979 template <typename Type, typename LenType=USHORT>
980 struct HeadlessArrayOf
982 inline const Type& operator [] (unsigned int i) const
984 if (unlikely (i >= len || !i)) return Null(Type);
987 inline unsigned int get_size (void) const
988 { return len.static_size + (len ? len - 1 : 0) * Type::static_size; }
990 inline bool serialize (hb_serialize_context_t *c,
991 Supplier<Type> &items,
992 unsigned int items_len)
994 TRACE_SERIALIZE (this);
995 if (unlikely (!c->extend_min (*this))) return_trace (false);
996 len.set (items_len); /* TODO(serialize) Overflow? */
997 if (unlikely (!items_len)) return_trace (true);
998 if (unlikely (!c->extend (*this))) return_trace (false);
999 for (unsigned int i = 0; i < items_len - 1; i++)
1000 array[i] = items[i];
1001 items.advance (items_len - 1);
1002 return_trace (true);
1005 inline bool sanitize_shallow (hb_sanitize_context_t *c) const
1007 return c->check_struct (this)
1008 && c->check_array (this, Type::static_size, len);
1011 inline bool sanitize (hb_sanitize_context_t *c) const
1013 TRACE_SANITIZE (this);
1014 if (unlikely (!sanitize_shallow (c))) return_trace (false);
1016 /* Note: for structs that do not reference other structs,
1017 * we do not need to call their sanitize() as we already did
1018 * a bound check on the aggregate array size. We just include
1019 * a small unreachable expression to make sure the structs
1020 * pointed to do have a simple sanitize(), ie. they do not
1021 * reference other structs via offsets.
1023 (void) (false && array[0].sanitize (c));
1025 return_trace (true);
1031 DEFINE_SIZE_ARRAY (sizeof (LenType), array);
1035 /* An array with sorted elements. Supports binary searching. */
1036 template <typename Type, typename LenType=USHORT>
1037 struct SortedArrayOf : ArrayOf<Type, LenType>
1039 template <typename SearchType>
1040 inline int bsearch (const SearchType &x) const
1042 /* Hand-coded bsearch here since this is in the hot inner loop. */
1043 int min = 0, max = (int) this->len - 1;
1046 int mid = (min + max) / 2;
1047 int c = this->array[mid].cmp (x);
1060 } /* namespace OT */
1063 #endif /* HB_OPEN_TYPE_PRIVATE_HH */