2 * Copyright © 2007,2008,2009,2010 Red Hat, Inc.
3 * Copyright © 2010,2012 Google, Inc.
5 * This is part of HarfBuzz, a text shaping library.
7 * Permission is hereby granted, without written agreement and without
8 * license or royalty fees, to use, copy, modify, and distribute this
9 * software and its documentation for any purpose, provided that the
10 * above copyright notice and the following two paragraphs appear in
11 * all copies of this software.
13 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
14 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
15 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
16 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
19 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
20 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
21 * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
22 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
23 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
25 * Red Hat Author(s): Behdad Esfahbod
26 * Google Author(s): Behdad Esfahbod
29 #ifndef HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH
30 #define HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH
32 #include "hb-buffer-private.hh"
33 #include "hb-ot-layout-gdef-table.hh"
34 #include "hb-set-private.hh"
/* Debug level for the glyph-closure pass; may be overridden at build time. */
40 #ifndef HB_DEBUG_CLOSURE
41 #define HB_DEBUG_CLOSURE (HB_DEBUG+0)
/* Opens an RAII trace scope for closure() methods; 'c' is the closure context
 * in the enclosing scope.  NOTE(review): the macro body continues past the
 * lines visible in this extract (trailing '\') -- confirm against full file. */
44 #define TRACE_CLOSURE(this) \
45 hb_auto_trace_t<HB_DEBUG_CLOSURE, hb_void_t> trace \
46 (&c->debug_depth, c->get_name (), this, HB_FUNC, \
/* Dispatch context that computes the closure of a glyph set over lookups:
 * dispatch() calls obj.closure(this) and returns nothing (HB_VOID).
 * Recursion into sub-lookups is bounded by nesting_level_left.
 * NOTE(review): interior lines (braces, the glyph-set member, parts of the
 * constructor) are elided from this extract. */
49 struct hb_closure_context_t :
50 hb_dispatch_context_t<hb_closure_context_t, hb_void_t, HB_DEBUG_CLOSURE>
52 inline const char *get_name (void) { return "CLOSURE"; }
53 typedef return_t (*recurse_func_t) (hb_closure_context_t *c, unsigned int lookup_index);
55 inline return_t dispatch (const T &obj) { obj.closure (this); return HB_VOID; }
56 static return_t default_return_value (void) { return HB_VOID; }
/* Closure never short-circuits sub-lookup iteration: always visit all. */
57 bool stop_sublookup_iteration (return_t r HB_UNUSED) const { return false; }
/* Recurse into another lookup by index; bails out when the nesting budget
 * is exhausted or no recurse callback has been installed. */
58 return_t recurse (unsigned int lookup_index)
60 if (unlikely (nesting_level_left == 0 || !recurse_func))
61 return default_return_value ();
64 recurse_func (this, lookup_index);
71 recurse_func_t recurse_func;
72 unsigned int nesting_level_left;
73 unsigned int debug_depth;
/* Constructor: nesting budget defaults to HB_MAX_NESTING_LEVEL. */
75 hb_closure_context_t (hb_face_t *face_,
77 unsigned int nesting_level_left_ = HB_MAX_NESTING_LEVEL) :
81 nesting_level_left (nesting_level_left_),
84 void set_recurse_func (recurse_func_t func) { recurse_func = func; }
/* Debug level for the would_apply pass; may be overridden at build time. */
89 #ifndef HB_DEBUG_WOULD_APPLY
90 #define HB_DEBUG_WOULD_APPLY (HB_DEBUG+0)
/* RAII trace scope for would_apply() methods (returns bool).
 * NOTE(review): macro body continues past this extract (trailing '\'). */
93 #define TRACE_WOULD_APPLY(this) \
94 hb_auto_trace_t<HB_DEBUG_WOULD_APPLY, bool> trace \
95 (&c->debug_depth, c->get_name (), this, HB_FUNC, \
/* Dispatch context asking "would this lookup apply to this glyph sequence?"
 * without mutating any buffer: dispatch() returns obj.would_apply(this).
 * Iteration over sub-lookups stops at the first 'true'.
 * NOTE(review): several members (glyph count, zero_context declaration) are
 * elided from this extract. */
98 struct hb_would_apply_context_t :
99 hb_dispatch_context_t<hb_would_apply_context_t, bool, HB_DEBUG_WOULD_APPLY>
101 inline const char *get_name (void) { return "WOULD_APPLY"; }
102 template <typename T>
103 inline return_t dispatch (const T &obj) { return obj.would_apply (this); }
104 static return_t default_return_value (void) { return false; }
/* Stop as soon as any sub-lookup reports it would apply. */
105 bool stop_sublookup_iteration (return_t r) const { return r; }
/* Raw glyph sequence being tested (no hb_buffer_t involved). */
108 const hb_codepoint_t *glyphs;
111 unsigned int debug_depth;
113 hb_would_apply_context_t (hb_face_t *face_,
114 const hb_codepoint_t *glyphs_,
116 bool zero_context_) :
120 zero_context (zero_context_),
/* Debug level for the collect-glyphs pass; may be overridden at build time. */
126 #ifndef HB_DEBUG_COLLECT_GLYPHS
127 #define HB_DEBUG_COLLECT_GLYPHS (HB_DEBUG+0)
/* RAII trace scope for collect_glyphs() methods (void-valued pass).
 * NOTE(review): macro body continues past this extract (trailing '\'). */
130 #define TRACE_COLLECT_GLYPHS(this) \
131 hb_auto_trace_t<HB_DEBUG_COLLECT_GLYPHS, hb_void_t> trace \
132 (&c->debug_depth, c->get_name (), this, HB_FUNC, \
/* Dispatch context that collects the glyph sets a lookup can touch, split
 * into before/input/after/output sets (any of which the caller may not want;
 * unwanted sets alias the shared empty set so additions are no-ops).
 * Tracks already-recursed lookups in 'recursed_lookups' to avoid re-visiting.
 * NOTE(review): braces and some statements are elided from this extract. */
135 struct hb_collect_glyphs_context_t :
136 hb_dispatch_context_t<hb_collect_glyphs_context_t, hb_void_t, HB_DEBUG_COLLECT_GLYPHS>
138 inline const char *get_name (void) { return "COLLECT_GLYPHS"; }
139 typedef return_t (*recurse_func_t) (hb_collect_glyphs_context_t *c, unsigned int lookup_index);
140 template <typename T>
141 inline return_t dispatch (const T &obj) { obj.collect_glyphs (this); return HB_VOID; }
142 static return_t default_return_value (void) { return HB_VOID; }
143 bool stop_sublookup_iteration (return_t r HB_UNUSED) const { return false; }
/* Recurse into a sub-lookup, but only collect its OUTPUT glyphs: the
 * before/input/after sets are temporarily redirected to the empty set. */
144 return_t recurse (unsigned int lookup_index)
146 if (unlikely (nesting_level_left == 0 || !recurse_func))
147 return default_return_value ();
149 /* Note that GPOS sets recurse_func to NULL already, so it doesn't get
150 * past the previous check. For GSUB, we only want to collect the output
151 * glyphs in the recursion. If output is not requested, we can go home now.
153 * Note further, that the above is not exactly correct. A recursed lookup
154 * is allowed to match input that is not matched in the context, but that's
155 * not how most fonts are built. It's possible to relax that and recurse
156 * with all sets here if it proves to be an issue.
159 if (output == hb_set_get_empty ())
162 /* Return if new lookup was recursed to before. */
163 if (recursed_lookups.has (lookup_index))
/* Save the real sets, park them on the empty set for the recursion. */
166 hb_set_t *old_before = before;
167 hb_set_t *old_input = input;
168 hb_set_t *old_after = after;
169 before = input = after = hb_set_get_empty ();
171 nesting_level_left--;
172 recurse_func (this, lookup_index);
173 nesting_level_left++;
/* Remember this lookup so we don't walk it twice. */
179 recursed_lookups.add (lookup_index);
189 recurse_func_t recurse_func;
190 hb_set_t recursed_lookups;
191 unsigned int nesting_level_left;
192 unsigned int debug_depth;
/* NULL output pointers are replaced by the shared empty set, making
 * subsequent add() calls harmless no-ops. */
194 hb_collect_glyphs_context_t (hb_face_t *face_,
195 hb_set_t *glyphs_before, /* OUT. May be NULL */
196 hb_set_t *glyphs_input, /* OUT. May be NULL */
197 hb_set_t *glyphs_after, /* OUT. May be NULL */
198 hb_set_t *glyphs_output, /* OUT. May be NULL */
199 unsigned int nesting_level_left_ = HB_MAX_NESTING_LEVEL) :
201 before (glyphs_before ? glyphs_before : hb_set_get_empty ()),
202 input (glyphs_input ? glyphs_input : hb_set_get_empty ()),
203 after (glyphs_after ? glyphs_after : hb_set_get_empty ()),
204 output (glyphs_output ? glyphs_output : hb_set_get_empty ()),
207 nesting_level_left (nesting_level_left_),
210 recursed_lookups.init ();
/* Destructor releases the recursed-lookups set (init/fini pairing). */
212 ~hb_collect_glyphs_context_t (void)
214 recursed_lookups.fini ();
217 void set_recurse_func (recurse_func_t func) { recurse_func = func; }
/* Debug level for coverage collection; may be overridden at build time. */
222 #ifndef HB_DEBUG_GET_COVERAGE
223 #define HB_DEBUG_GET_COVERAGE (HB_DEBUG+0)
226 /* XXX Can we remove this? */
/* Dispatch context that accumulates every sub-table's Coverage into 'set'.
 * dispatch() returns the Coverage reference; stop_sublookup_iteration() is
 * (ab)used to add it to the set.  NOTE(review): braces, the 'set' member
 * declaration and the return statement of stop_sublookup_iteration are
 * elided from this extract. */
228 template <typename set_t>
229 struct hb_add_coverage_context_t :
230 hb_dispatch_context_t<hb_add_coverage_context_t<set_t>, const Coverage &, HB_DEBUG_GET_COVERAGE>
232 inline const char *get_name (void) { return "GET_COVERAGE"; }
233 typedef const Coverage &return_t;
234 template <typename T>
235 inline return_t dispatch (const T &obj) { return obj.get_coverage (); }
/* Null Coverage object as the safe "nothing" value. */
236 static return_t default_return_value (void) { return Null(Coverage); }
/* Side effect on purpose: fold the returned Coverage into the target set. */
237 bool stop_sublookup_iteration (return_t r) const
239 r.add_coverage (set);
243 hb_add_coverage_context_t (set_t *set_) :
248 unsigned int debug_depth;
/* Debug level for the apply pass; may be overridden at build time. */
253 #ifndef HB_DEBUG_APPLY
254 #define HB_DEBUG_APPLY (HB_DEBUG+0)
/* RAII trace scope for apply() methods; logs current buffer index, glyph id
 * and the lookup being applied. */
257 #define TRACE_APPLY(this) \
258 hb_auto_trace_t<HB_DEBUG_APPLY, bool> trace \
259 (&c->debug_depth, c->get_name (), this, HB_FUNC, \
260 "idx %d gid %u lookup %d", \
261 c->buffer->idx, c->buffer->cur().codepoint, (int) c->lookup_index);
/* The main context used while applying GSUB/GPOS lookups to a buffer.
 * Contains: matcher_t (decides per-glyph whether it may match / may be
 * skipped), skipping_iterator_t (walks the buffer honoring lookup flags and
 * default-ignorables), and the apply state proper (font, buffer, masks,
 * recursion budget, glyph-prop helpers).
 * NOTE(review): many interior lines (braces, enum definitions MATCH_*/SKIP_*,
 * several members) are elided from this extract. */
263 struct hb_apply_context_t :
264 hb_dispatch_context_t<hb_apply_context_t, bool, HB_DEBUG_APPLY>
/* -- matcher_t: per-glyph match/skip policy ------------------------------ */
268 inline matcher_t (void) :
273 #define arg1(arg) (arg) /* Remove the macro to see why it's needed! */
277 match_data (NULL) {};
279 typedef bool (*match_func_t) (hb_codepoint_t glyph_id, const USHORT &value, const void *data);
281 inline void set_ignore_zwnj (bool ignore_zwnj_) { ignore_zwnj = ignore_zwnj_; }
282 inline void set_ignore_zwj (bool ignore_zwj_) { ignore_zwj = ignore_zwj_; }
283 inline void set_lookup_props (unsigned int lookup_props_) { lookup_props = lookup_props_; }
284 inline void set_mask (hb_mask_t mask_) { mask = mask_; }
285 inline void set_syllable (uint8_t syllable_) { syllable = syllable_; }
286 inline void set_match_func (match_func_t match_func_,
287 const void *match_data_)
288 { match_func = match_func_; match_data = match_data_; }
/* May this glyph match?  Rejected outright if its mask or syllable doesn't
 * agree; otherwise defer to the installed match_func (if any). */
296 inline may_match_t may_match (const hb_glyph_info_t &info,
297 const USHORT *glyph_data) const
299 if (!(info.mask & mask) ||
300 (syllable && syllable != info.syllable ()))
304 return match_func (info.codepoint, *glyph_data, match_data) ? MATCH_YES : MATCH_NO;
/* May this glyph be skipped?  Combines lookup-flag glyph-property checks
 * with default-ignorable (ZWNJ/ZWJ) handling. */
316 may_skip (const hb_apply_context_t *c,
317 const hb_glyph_info_t &info) const
319 if (!c->check_glyph_property (&info, lookup_props))
322 if (unlikely (_hb_glyph_info_is_default_ignorable_and_not_fvs (&info) &&
323 (ignore_zwnj || !_hb_glyph_info_is_zwnj (&info)) &&
324 (ignore_zwj || !_hb_glyph_info_is_zwj (&info))))
331 unsigned int lookup_props;
336 match_func_t match_func;
337 const void *match_data;
/* -- skipping_iterator_t: buffer walker honoring skip rules -------------- */
340 struct skipping_iterator_t
342 inline void init (hb_apply_context_t *c_, bool context_match = false)
345 match_glyph_data = NULL,
346 matcher.set_match_func (NULL, NULL);
347 matcher.set_lookup_props (c->lookup_props);
348 /* Ignore ZWNJ if we are matching GSUB context, or matching GPOS. */
349 matcher.set_ignore_zwnj (context_match || c->table_index == 1);
350 /* Ignore ZWJ if we are matching GSUB context, or matching GPOS, or if asked to. */
351 matcher.set_ignore_zwj (context_match || c->table_index == 1 || c->auto_zwj);
/* Context matching ignores the lookup mask (-1 = all bits). */
352 matcher.set_mask (context_match ? -1 : c->lookup_mask);
354 inline void set_lookup_props (unsigned int lookup_props)
356 matcher.set_lookup_props (lookup_props);
358 inline void set_match_func (matcher_t::match_func_t match_func_,
359 const void *match_data_,
360 const USHORT glyph_data[])
362 matcher.set_match_func (match_func_, match_data_);
363 match_glyph_data = glyph_data;
/* Position the iterator; syllable matching only kicks in when starting at
 * the buffer's current glyph. */
366 inline void reset (unsigned int start_index_,
367 unsigned int num_items_)
370 num_items = num_items_;
371 end = c->buffer->len;
372 matcher.set_syllable (start_index_ == c->buffer->idx ? c->buffer->cur().syllable () : 0);
/* Undo the bookkeeping of one accepted item. */
375 inline void reject (void) { num_items++; match_glyph_data--; }
/* Advance forward to the next matchable glyph; false when exhausted or a
 * non-skippable non-match is hit. */
377 inline bool next (void)
379 assert (num_items > 0);
380 while (idx + num_items < end)
383 const hb_glyph_info_t &info = c->buffer->info[idx];
385 matcher_t::may_skip_t skip = matcher.may_skip (c, info);
386 if (unlikely (skip == matcher_t::SKIP_YES))
389 matcher_t::may_match_t match = matcher.may_match (info, match_glyph_data);
390 if (match == matcher_t::MATCH_YES ||
391 (match == matcher_t::MATCH_MAYBE &&
392 skip == matcher_t::SKIP_NO))
399 if (skip == matcher_t::SKIP_NO)
/* Walk backward over the already-output glyphs (out_info), same policy. */
404 inline bool prev (void)
406 assert (num_items > 0);
407 while (idx >= num_items)
410 const hb_glyph_info_t &info = c->buffer->out_info[idx];
412 matcher_t::may_skip_t skip = matcher.may_skip (c, info);
413 if (unlikely (skip == matcher_t::SKIP_YES))
416 matcher_t::may_match_t match = matcher.may_match (info, match_glyph_data);
417 if (match == matcher_t::MATCH_YES ||
418 (match == matcher_t::MATCH_MAYBE &&
419 skip == matcher_t::SKIP_NO))
426 if (skip == matcher_t::SKIP_NO)
434 hb_apply_context_t *c;
436 const USHORT *match_glyph_data;
438 unsigned int num_items;
/* -- dispatch interface -------------------------------------------------- */
443 inline const char *get_name (void) { return "APPLY"; }
444 typedef return_t (*recurse_func_t) (hb_apply_context_t *c, unsigned int lookup_index);
445 template <typename T>
446 inline return_t dispatch (const T &obj) { return obj.apply (this); }
447 static return_t default_return_value (void) { return false; }
/* First successful sub-lookup wins. */
448 bool stop_sublookup_iteration (return_t r) const { return r; }
449 return_t recurse (unsigned int lookup_index)
451 if (unlikely (nesting_level_left == 0 || !recurse_func))
452 return default_return_value ();
454 nesting_level_left--;
455 bool ret = recurse_func (this, lookup_index);
456 nesting_level_left++;
/* -- state --------------------------------------------------------------- */
460 unsigned int table_index; /* GSUB/GPOS */
464 hb_direction_t direction;
465 hb_mask_t lookup_mask;
467 recurse_func_t recurse_func;
468 unsigned int nesting_level_left;
469 unsigned int lookup_props;
471 bool has_glyph_classes;
472 skipping_iterator_t iter_input, iter_context;
473 unsigned int lookup_index;
474 unsigned int debug_depth;
477 hb_apply_context_t (unsigned int table_index_,
479 hb_buffer_t *buffer_) :
480 table_index (table_index_),
481 font (font_), face (font->face), buffer (buffer_),
482 direction (buffer_->props.direction),
486 nesting_level_left (HB_MAX_NESTING_LEVEL),
488 gdef (*hb_ot_layout_from_face (face)->gdef),
489 has_glyph_classes (gdef.has_glyph_classes ()),
492 lookup_index ((unsigned int) -1),
495 inline void set_lookup_mask (hb_mask_t mask) { lookup_mask = mask; }
496 inline void set_auto_zwj (bool auto_zwj_) { auto_zwj = auto_zwj_; }
497 inline void set_recurse_func (recurse_func_t func) { recurse_func = func; }
498 inline void set_lookup_index (unsigned int lookup_index_) { lookup_index = lookup_index_; }
/* Changing lookup props re-initializes both skipping iterators. */
499 inline void set_lookup_props (unsigned int lookup_props_)
501 lookup_props = lookup_props_;
502 iter_input.init (this, false);
503 iter_context.init (this, true);
/* Mark-specific part of the LookupFlag property check. */
507 match_properties_mark (hb_codepoint_t glyph,
508 unsigned int glyph_props,
509 unsigned int match_props) const
511 /* If using mark filtering sets, the high short of
512 * match_props has the set index.
514 if (match_props & LookupFlag::UseMarkFilteringSet)
515 return gdef.mark_set_covers (match_props >> 16, glyph);
517 /* The second byte of match_props has the meaning
518 * "ignore marks of attachment type different than
519 * the attachment type specified."
521 if (match_props & LookupFlag::MarkAttachmentType)
522 return (match_props & LookupFlag::MarkAttachmentType) == (glyph_props & LookupFlag::MarkAttachmentType);
/* Full LookupFlag check: ignore-class bits first, then mark filtering. */
528 check_glyph_property (const hb_glyph_info_t *info,
529 unsigned int match_props) const
531 hb_codepoint_t glyph = info->codepoint;
532 unsigned int glyph_props = _hb_glyph_info_get_glyph_props (info);
534 /* Not covered, if, for example, glyph class is ligature and
535 * match_props includes LookupFlags::IgnoreLigatures
537 if (glyph_props & match_props & LookupFlag::IgnoreFlags)
540 if (unlikely (glyph_props & HB_OT_LAYOUT_GLYPH_PROPS_MARK))
541 return match_properties_mark (glyph, glyph_props, match_props);
/* Compute and store glyph props for a substituted glyph, preserving the
 * PRESERVE bits of the glyph being replaced and adding SUBSTITUTED (plus
 * LIGATED/MULTIPLIED per the ligature/component flags). */
546 inline void _set_glyph_props (hb_codepoint_t glyph_index,
547 unsigned int class_guess = 0,
548 bool ligature = false,
549 bool component = false) const
551 unsigned int add_in = _hb_glyph_info_get_glyph_props (&buffer->cur()) &
552 HB_OT_LAYOUT_GLYPH_PROPS_PRESERVE;
553 add_in |= HB_OT_LAYOUT_GLYPH_PROPS_SUBSTITUTED;
556 add_in |= HB_OT_LAYOUT_GLYPH_PROPS_LIGATED;
557 /* In the only place that the MULTIPLIED bit is used, Uniscribe
558 * seems to only care about the "last" transformation between
559 * Ligature and Multiple substitutions. Ie. if you ligate, expand,
560 * and ligate again, it forgives the multiplication and acts as
561 * if only ligation happened. As such, clear MULTIPLIED bit.
563 add_in &= ~HB_OT_LAYOUT_GLYPH_PROPS_MULTIPLIED;
566 add_in |= HB_OT_LAYOUT_GLYPH_PROPS_MULTIPLIED;
/* Real GDEF classes win over the caller's guess. */
567 if (likely (has_glyph_classes))
568 _hb_glyph_info_set_glyph_props (&buffer->cur(), add_in | gdef.get_glyph_props (glyph_index));
569 else if (class_guess)
570 _hb_glyph_info_set_glyph_props (&buffer->cur(), add_in | class_guess);
/* Substitution helpers: set props then write the glyph to the buffer. */
573 inline void replace_glyph (hb_codepoint_t glyph_index) const
575 _set_glyph_props (glyph_index);
576 buffer->replace_glyph (glyph_index);
/* In-place variant: overwrites the current glyph without advancing. */
578 inline void replace_glyph_inplace (hb_codepoint_t glyph_index) const
580 _set_glyph_props (glyph_index);
581 buffer->cur().codepoint = glyph_index;
583 inline void replace_glyph_with_ligature (hb_codepoint_t glyph_index,
584 unsigned int class_guess) const
586 _set_glyph_props (glyph_index, class_guess, true);
587 buffer->replace_glyph (glyph_index);
/* Emit one component of a MultipleSubst-style expansion. */
589 inline void output_glyph_for_component (hb_codepoint_t glyph_index,
590 unsigned int class_guess) const
592 _set_glyph_props (glyph_index, class_guess, false, true);
593 buffer->output_glyph (glyph_index);
/* Per-pass callback signatures: all take one coded value (glyph id, class,
 * or coverage offset) plus opaque data identifying how to decode it. */
599 typedef bool (*intersects_func_t) (hb_set_t *glyphs, const USHORT &value, const void *data);
600 typedef void (*collect_glyphs_func_t) (hb_set_t *glyphs, const USHORT &value, const void *data);
601 typedef bool (*match_func_t) (hb_codepoint_t glyph_id, const USHORT &value, const void *data);
/* Tiny vtable-like structs bundling the callback for each pass. */
603 struct ContextClosureFuncs
605 intersects_func_t intersects;
607 struct ContextCollectGlyphsFuncs
609 collect_glyphs_func_t collect;
611 struct ContextApplyFuncs
/* Format-1 closure test: does the set contain this literal glyph id? */
617 static inline bool intersects_glyph (hb_set_t *glyphs, const USHORT &value, const void *data HB_UNUSED)
619 return glyphs->has (value);
/* Format-2 closure test: 'data' is a ClassDef; does any glyph of the set
 * belong to class 'value'? */
621 static inline bool intersects_class (hb_set_t *glyphs, const USHORT &value, const void *data)
623 const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
624 return class_def.intersects_class (glyphs, value);
/* Format-3 closure test: 'value' is an offset (relative to 'data') to a
 * Coverage table; does that coverage intersect the set? */
626 static inline bool intersects_coverage (hb_set_t *glyphs, const USHORT &value, const void *data)
628 const OffsetTo<Coverage> &coverage = (const OffsetTo<Coverage>&)value;
629 return (data+coverage).intersects (glyphs);
/* True iff EVERY value in the array intersects the closure glyph set,
 * using the pass-specific decode function.
 * NOTE(review): the 'count' parameter line and the final return are elided
 * from this extract. */
632 static inline bool intersects_array (hb_closure_context_t *c,
634 const USHORT values[],
635 intersects_func_t intersects_func,
636 const void *intersects_data)
638 for (unsigned int i = 0; i < count; i++)
639 if (likely (!intersects_func (c->glyphs, values[i], intersects_data)))
/* Format-1 collector.  NOTE(review): body not visible in this extract;
 * presumably adds 'value' to 'glyphs' -- confirm against full file. */
645 static inline void collect_glyph (hb_set_t *glyphs, const USHORT &value, const void *data HB_UNUSED)
/* Format-2 collector: add every glyph of ClassDef class 'value' to the set. */
649 static inline void collect_class (hb_set_t *glyphs, const USHORT &value, const void *data)
651 const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
652 class_def.add_class (glyphs, value);
/* Format-3 collector: add all glyphs covered by the Coverage table at
 * offset 'value' (relative to 'data') to the set. */
654 static inline void collect_coverage (hb_set_t *glyphs, const USHORT &value, const void *data)
656 const OffsetTo<Coverage> &coverage = (const OffsetTo<Coverage>&)value;
657 (data+coverage).add_coverage (glyphs);
/* Collect every value of the array into 'glyphs' using the pass-specific
 * decoder.  NOTE(review): the 'glyphs'/'count' parameter lines are elided
 * from this extract. */
659 static inline void collect_array (hb_collect_glyphs_context_t *c HB_UNUSED,
662 const USHORT values[],
663 collect_glyphs_func_t collect_func,
664 const void *collect_data)
666 for (unsigned int i = 0; i < count; i++)
667 collect_func (glyphs, values[i], collect_data);
/* Format-1 matcher: literal glyph-id equality. */
671 static inline bool match_glyph (hb_codepoint_t glyph_id, const USHORT &value, const void *data HB_UNUSED)
673 return glyph_id == value;
/* Format-2 matcher: glyph's ClassDef class must equal 'value'. */
675 static inline bool match_class (hb_codepoint_t glyph_id, const USHORT &value, const void *data)
677 const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
678 return class_def.get_class (glyph_id) == value;
/* Format-3 matcher: glyph must be covered by the Coverage table at offset
 * 'value' (relative to 'data'). */
680 static inline bool match_coverage (hb_codepoint_t glyph_id, const USHORT &value, const void *data)
682 const OffsetTo<Coverage> &coverage = (const OffsetTo<Coverage>&)value;
683 return (data+coverage).get_coverage (glyph_id) != NOT_COVERED;
/* Buffer-less input match: checks glyphs[1..count-1] against input[0..count-2]
 * directly (no skipping, no buffer).  Used by the would_apply pass.
 * NOTE(review): length check and return statements are elided here. */
686 static inline bool would_match_input (hb_would_apply_context_t *c,
687 unsigned int count, /* Including the first glyph (not matched) */
688 const USHORT input[], /* Array of input values--start with second glyph */
689 match_func_t match_func,
690 const void *match_data)
695 for (unsigned int i = 1; i < count; i++)
696 if (likely (!match_func (c->glyphs[i], input[i - 1], match_data)))
/* Match 'count' input glyphs starting at the buffer cursor, using the
 * skipping iterator so ignorable glyphs are stepped over.  On success fills
 * match_positions[] with the matched indices, *end_offset with the matched
 * span length, and optionally reports whether the span would form a
 * mark-only ligature and its total component count. */
701 static inline bool match_input (hb_apply_context_t *c,
702 unsigned int count, /* Including the first glyph (not matched) */
703 const USHORT input[], /* Array of input values--start with second glyph */
704 match_func_t match_func,
705 const void *match_data,
706 unsigned int *end_offset,
707 unsigned int match_positions[HB_MAX_CONTEXT_LENGTH],
708 bool *p_is_mark_ligature = NULL,
709 unsigned int *p_total_component_count = NULL)
/* match_positions is a fixed-size array; refuse over-long rules. */
713 if (unlikely (count > HB_MAX_CONTEXT_LENGTH)) return_trace (false);
715 hb_buffer_t *buffer = c->buffer;
717 hb_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
718 skippy_iter.reset (buffer->idx, count - 1);
719 skippy_iter.set_match_func (match_func, match_data, input);
722 * This is perhaps the trickiest part of OpenType... Remarks:
724 * - If all components of the ligature were marks, we call this a mark ligature.
726 * - If there is no GDEF, and the ligature is NOT a mark ligature, we categorize
727 * it as a ligature glyph.
729 * - Ligatures cannot be formed across glyphs attached to different components
730 * of previous ligatures. Eg. the sequence is LAM,SHADDA,LAM,FATHA,HEH, and
731 * LAM,LAM,HEH form a ligature, leaving SHADDA,FATHA next to each other.
732 * However, it would be wrong to ligate that SHADDA,FATHA sequence.
733 * There is an exception to this: If a ligature tries ligating with marks that
734 * belong to it itself, go ahead, assuming that the font designer knows what
735 * they are doing (otherwise it can break Indic stuff when a matra wants to
736 * ligate with a conjunct...)
739 bool is_mark_ligature = _hb_glyph_info_is_mark (&buffer->cur());
741 unsigned int total_component_count = 0;
742 total_component_count += _hb_glyph_info_get_lig_num_comps (&buffer->cur());
744 unsigned int first_lig_id = _hb_glyph_info_get_lig_id (&buffer->cur());
745 unsigned int first_lig_comp = _hb_glyph_info_get_lig_comp (&buffer->cur());
747 match_positions[0] = buffer->idx;
748 for (unsigned int i = 1; i < count; i++)
750 if (!skippy_iter.next ()) return_trace (false);
752 match_positions[i] = skippy_iter.idx;
754 unsigned int this_lig_id = _hb_glyph_info_get_lig_id (&buffer->info[skippy_iter.idx]);
755 unsigned int this_lig_comp = _hb_glyph_info_get_lig_comp (&buffer->info[skippy_iter.idx]);
757 if (first_lig_id && first_lig_comp) {
758 /* If first component was attached to a previous ligature component,
759 * all subsequent components should be attached to the same ligature
760 * component, otherwise we shouldn't ligate them. */
761 if (first_lig_id != this_lig_id || first_lig_comp != this_lig_comp)
762 return_trace (false);
764 /* If first component was NOT attached to a previous ligature component,
765 * all subsequent components should also NOT be attached to any ligature
766 * component, unless they are attached to the first component itself! */
767 if (this_lig_id && this_lig_comp && (this_lig_id != first_lig_id))
768 return_trace (false);
/* Mark-ligature status survives only while every component is a mark. */
771 is_mark_ligature = is_mark_ligature && _hb_glyph_info_is_mark (&buffer->info[skippy_iter.idx]);
772 total_component_count += _hb_glyph_info_get_lig_num_comps (&buffer->info[skippy_iter.idx]);
775 *end_offset = skippy_iter.idx - buffer->idx + 1;
777 if (p_is_mark_ligature)
778 *p_is_mark_ligature = is_mark_ligature;
780 if (p_total_component_count)
781 *p_total_component_count = total_component_count;
/* Replace a matched glyph run with a single ligature glyph, then walk the
 * skipped-over marks and re-attach them (lig id/component) to the new
 * ligature so GPOS mark positioning keeps working. */
785 static inline bool ligate_input (hb_apply_context_t *c,
786 unsigned int count, /* Including the first glyph */
787 unsigned int match_positions[HB_MAX_CONTEXT_LENGTH], /* Including the first glyph */
788 unsigned int match_length,
789 hb_codepoint_t lig_glyph,
790 bool is_mark_ligature,
791 unsigned int total_component_count)
795 hb_buffer_t *buffer = c->buffer;
/* The whole matched span becomes one cluster. */
797 buffer->merge_clusters (buffer->idx, buffer->idx + match_length);
800 * - If it *is* a mark ligature, we don't allocate a new ligature id, and leave
801 * the ligature to keep its old ligature id. This will allow it to attach to
802 * a base ligature in GPOS. Eg. if the sequence is: LAM,LAM,SHADDA,FATHA,HEH,
803 * and LAM,LAM,HEH form a ligature, they will leave SHADDA and FATHA with a
804 * ligature id and component value of 2. Then if SHADDA,FATHA form a ligature
805 * later, we don't want them to lose their ligature id/component, otherwise
806 * GPOS will fail to correctly position the mark ligature on top of the
807 * LAM,LAM,HEH ligature. See:
808 * https://bugzilla.gnome.org/show_bug.cgi?id=676343
810 * - If a ligature is formed of components that some of which are also ligatures
811 * themselves, and those ligature components had marks attached to *their*
812 * components, we have to attach the marks to the new ligature component
813 * positions! Now *that*'s tricky! And these marks may be following the
814 * last component of the whole sequence, so we should loop forward looking
815 * for them and update them.
817 * Eg. the sequence is LAM,LAM,SHADDA,FATHA,HEH, and the font first forms a
818 * 'calt' ligature of LAM,HEH, leaving the SHADDA and FATHA with a ligature
819 * id and component == 1. Now, during 'liga', the LAM and the LAM-HEH ligature
820 * form a LAM-LAM-HEH ligature. We need to reassign the SHADDA and FATHA to
821 * the new ligature with a component value of 2.
823 * This in fact happened to a font... See:
824 * https://bugzilla.gnome.org/show_bug.cgi?id=437633
/* Mark-only ligatures keep their old lig id and get no LIGATURE class. */
827 unsigned int klass = is_mark_ligature ? 0 : HB_OT_LAYOUT_GLYPH_PROPS_LIGATURE;
828 unsigned int lig_id = is_mark_ligature ? 0 : _hb_allocate_lig_id (buffer);
829 unsigned int last_lig_id = _hb_glyph_info_get_lig_id (&buffer->cur());
830 unsigned int last_num_components = _hb_glyph_info_get_lig_num_comps (&buffer->cur());
831 unsigned int components_so_far = last_num_components;
833 if (!is_mark_ligature)
835 _hb_glyph_info_set_lig_props_for_ligature (&buffer->cur(), lig_id, total_component_count);
836 if (_hb_glyph_info_get_general_category (&buffer->cur()) == HB_UNICODE_GENERAL_CATEGORY_NON_SPACING_MARK)
838 _hb_glyph_info_set_general_category (&buffer->cur(), HB_UNICODE_GENERAL_CATEGORY_OTHER_LETTER);
841 c->replace_glyph_with_ligature (lig_glyph, klass);
/* Walk the glyphs between matched components; re-home any marks onto the
 * new ligature with a recomputed component index. */
843 for (unsigned int i = 1; i < count; i++)
845 while (buffer->idx < match_positions[i] && !buffer->in_error)
847 if (!is_mark_ligature) {
848 unsigned int this_comp = _hb_glyph_info_get_lig_comp (&buffer->cur());
850 this_comp = last_num_components;
851 unsigned int new_lig_comp = components_so_far - last_num_components +
852 MIN (this_comp, last_num_components);
853 _hb_glyph_info_set_lig_props_for_mark (&buffer->cur(), lig_id, new_lig_comp);
855 buffer->next_glyph ();
858 last_lig_id = _hb_glyph_info_get_lig_id (&buffer->cur());
859 last_num_components = _hb_glyph_info_get_lig_num_comps (&buffer->cur());
860 components_so_far += last_num_components;
862 /* Skip the base glyph */
866 if (!is_mark_ligature && last_lig_id) {
867 /* Re-adjust components for any marks following. */
868 for (unsigned int i = buffer->idx; i < buffer->len; i++) {
869 if (last_lig_id == _hb_glyph_info_get_lig_id (&buffer->info[i])) {
870 unsigned int this_comp = _hb_glyph_info_get_lig_comp (&buffer->info[i]);
873 unsigned int new_lig_comp = components_so_far - last_num_components +
874 MIN (this_comp, last_num_components);
875 _hb_glyph_info_set_lig_props_for_mark (&buffer->info[i], lig_id, new_lig_comp);
/* Match 'count' backtrack values against the glyphs already written to the
 * output side of the buffer, walking backwards with the context iterator.
 * NOTE(review): the 'count' parameter line and final return are elided. */
883 static inline bool match_backtrack (hb_apply_context_t *c,
885 const USHORT backtrack[],
886 match_func_t match_func,
887 const void *match_data)
891 hb_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_context;
892 skippy_iter.reset (c->buffer->backtrack_len (), count);
893 skippy_iter.set_match_func (match_func, match_data, backtrack);
895 for (unsigned int i = 0; i < count; i++)
896 if (!skippy_iter.prev ())
897 return_trace (false);
/* Match 'count' lookahead values against glyphs after the matched input span
 * ('offset' past the cursor), walking forward with the context iterator.
 * NOTE(review): 'count'/'offset' parameter lines and final return elided. */
902 static inline bool match_lookahead (hb_apply_context_t *c,
904 const USHORT lookahead[],
905 match_func_t match_func,
906 const void *match_data,
911 hb_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_context;
912 skippy_iter.reset (c->buffer->idx + offset - 1, count);
913 skippy_iter.set_match_func (match_func, match_data, lookahead);
915 for (unsigned int i = 0; i < count; i++)
916 if (!skippy_iter.next ())
917 return_trace (false);
/* Interior of struct LookupRecord (header elided from this extract):
 * a (sequence index, lookup index) pair as stored in contextual subtables. */
/* Sanitize: the record is fixed-size plain data; a struct bounds check
 * suffices. */
926 inline bool sanitize (hb_sanitize_context_t *c) const
928 TRACE_SANITIZE (this);
929 return_trace (c->check_struct (this));
932 USHORT sequenceIndex; /* Index into current glyph
933 * sequence--first glyph = 0 */
934 USHORT lookupListIndex; /* Lookup to apply to that
935 * position--zero--based */
/* Two 16-bit fields: 4 bytes on disk. */
937 DEFINE_SIZE_STATIC (4);
/* Recurse into every lookup referenced by a LookupRecord array, for any
 * context type that provides recurse() (closure, collect-glyphs, ...). */
941 template <typename context_t>
942 static inline void recurse_lookups (context_t *c,
943 unsigned int lookupCount,
944 const LookupRecord lookupRecord[] /* Array of LookupRecords--in design order */)
946 for (unsigned int i = 0; i < lookupCount; i++)
947 c->recurse (lookupRecord[i].lookupListIndex);
/* Apply each LookupRecord of a matched context at its recorded sequence
 * position.  The hard part: a recursed lookup may grow/shrink the buffer, so
 * match_positions[] and the match end must be re-adjusted after each
 * recursion before applying the next record. */
950 static inline bool apply_lookup (hb_apply_context_t *c,
951 unsigned int count, /* Including the first glyph */
952 unsigned int match_positions[HB_MAX_CONTEXT_LENGTH], /* Including the first glyph */
953 unsigned int lookupCount,
954 const LookupRecord lookupRecord[], /* Array of LookupRecords--in design order */
955 unsigned int match_length)
959 hb_buffer_t *buffer = c->buffer;
962 /* All positions are distance from beginning of *output* buffer.
965 unsigned int bl = buffer->backtrack_len ();
966 end = bl + match_length;
968 int delta = bl - buffer->idx;
969 /* Convert positions to new indexing. */
970 for (unsigned int j = 0; j < count; j++)
971 match_positions[j] += delta;
974 for (unsigned int i = 0; i < lookupCount && !buffer->in_error; i++)
976 unsigned int idx = lookupRecord[i].sequenceIndex;
980 /* Don't recurse to ourself at same position.
981 * Note that this test is too naive, it doesn't catch longer loops. */
982 if (idx == 0 && lookupRecord[i].lookupListIndex == c->lookup_index)
985 buffer->move_to (match_positions[idx]);
/* Buffer length before/after recursion tells us how much it changed. */
987 unsigned int orig_len = buffer->backtrack_len () + buffer->lookahead_len ();
988 if (!c->recurse (lookupRecord[i].lookupListIndex))
991 unsigned int new_len = buffer->backtrack_len () + buffer->lookahead_len ();
992 int delta = new_len - orig_len;
997 /* Recursed lookup changed buffer len. Adjust. */
999 /* end can't go back past the current match position.
1000 * Note: this is only true because we do NOT allow MultipleSubst
1001 * with zero sequence len. */
1002 end = MAX (MIN((int) match_positions[idx] + 1, (int) new_len), int (end) + delta);
1004 unsigned int next = idx + 1; /* next now is the position after the recursed lookup. */
/* Guard the fixed-size positions array against overflow. */
1008 if (unlikely (delta + count > HB_MAX_CONTEXT_LENGTH))
1013 /* NOTE: delta is negative. */
1014 delta = MAX (delta, (int) next - (int) count);
/* Shift the tail of match_positions to make room / close the gap. */
1019 memmove (match_positions + next + delta, match_positions + next,
1020 (count - next) * sizeof (match_positions[0]));
1024 /* Fill in new entries. */
1025 for (unsigned int j = idx + 1; j < next; j++)
1026 match_positions[j] = match_positions[j - 1] + 1;
1028 /* And fixup the rest. */
1029 for (; next < count; next++)
1030 match_positions[next] += delta;
1033 buffer->move_to (end);
1035 return_trace (true);
1040 /* Contextual lookups */
/* Per-pass bundles: the callback table plus the opaque data it decodes
 * values with (NULL for glyph format, ClassDef for class format, subtable
 * base for coverage format). */
1042 struct ContextClosureLookupContext
1044 ContextClosureFuncs funcs;
1045 const void *intersects_data;
1048 struct ContextCollectGlyphsLookupContext
1050 ContextCollectGlyphsFuncs funcs;
1051 const void *collect_data;
1054 struct ContextApplyLookupContext
1056 ContextApplyFuncs funcs;
1057 const void *match_data;
/* Closure for one context rule: if every input value intersects the current
 * glyph set, recurse into the rule's lookups. */
1060 static inline void context_closure_lookup (hb_closure_context_t *c,
1061 unsigned int inputCount, /* Including the first glyph (not matched) */
1062 const USHORT input[], /* Array of input values--start with second glyph */
1063 unsigned int lookupCount,
1064 const LookupRecord lookupRecord[],
1065 ContextClosureLookupContext &lookup_context)
1067 if (intersects_array (c,
1068 inputCount ? inputCount - 1 : 0, input,
1069 lookup_context.funcs.intersects, lookup_context.intersects_data))
1071 lookupCount, lookupRecord);
/* Glyph collection for one context rule: add all input values to the input
 * set, then recurse into the rule's lookups. */
1074 static inline void context_collect_glyphs_lookup (hb_collect_glyphs_context_t *c,
1075 unsigned int inputCount, /* Including the first glyph (not matched) */
1076 const USHORT input[], /* Array of input values--start with second glyph */
1077 unsigned int lookupCount,
1078 const LookupRecord lookupRecord[],
1079 ContextCollectGlyphsLookupContext &lookup_context)
1081 collect_array (c, c->input,
1082 inputCount ? inputCount - 1 : 0, input,
1083 lookup_context.funcs.collect, lookup_context.collect_data);
1085 lookupCount, lookupRecord);
/* Would-apply for one context rule: only the input sequence matters; the
 * lookup records are irrelevant (hence HB_UNUSED). */
1088 static inline bool context_would_apply_lookup (hb_would_apply_context_t *c,
1089 unsigned int inputCount, /* Including the first glyph (not matched) */
1090 const USHORT input[], /* Array of input values--start with second glyph */
1091 unsigned int lookupCount HB_UNUSED,
1092 const LookupRecord lookupRecord[] HB_UNUSED,
1093 ContextApplyLookupContext &lookup_context)
1095 return would_match_input (c,
1097 lookup_context.funcs.match, lookup_context.match_data);
/* Apply one context rule: match the input sequence against the buffer
 * (recording matched positions, capped at HB_MAX_CONTEXT_LENGTH), and on
 * success apply the rule's lookup records at those positions (the
 * apply_lookup call head falls in lines elided from this excerpt). */
1099 static inline bool context_apply_lookup (hb_apply_context_t *c,
1100 unsigned int inputCount, /* Including the first glyph (not matched) */
1101 const USHORT input[], /* Array of input values--start with second glyph */
1102 unsigned int lookupCount,
1103 const LookupRecord lookupRecord[],
1104 ContextApplyLookupContext &lookup_context)
1106 unsigned int match_length = 0;
1107 unsigned int match_positions[HB_MAX_CONTEXT_LENGTH];
1108 return match_input (c,
1110 lookup_context.funcs.match, lookup_context.match_data,
1111 &match_length, match_positions)
1113 inputCount, match_positions,
1114 lookupCount, lookupRecord,
/* Rule (struct header elided from this excerpt): a single contextual rule,
 * laid out as inputCount/lookupCount followed by the inputZ[] values and
 * then the LookupRecord array.  Each entry point below locates the
 * LookupRecord array with StructAtOffset past the (inputCount-1) inputs,
 * then delegates to the matching context_*_lookup helper. */
1120 inline void closure (hb_closure_context_t *c, ContextClosureLookupContext &lookup_context) const
1122 TRACE_CLOSURE (this);
1123 const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (inputZ, inputZ[0].static_size * (inputCount ? inputCount - 1 : 0));
1124 context_closure_lookup (c,
1126 lookupCount, lookupRecord,
1130 inline void collect_glyphs (hb_collect_glyphs_context_t *c, ContextCollectGlyphsLookupContext &lookup_context) const
1132 TRACE_COLLECT_GLYPHS (this);
1133 const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (inputZ, inputZ[0].static_size * (inputCount ? inputCount - 1 : 0));
1134 context_collect_glyphs_lookup (c,
1136 lookupCount, lookupRecord,
1140 inline bool would_apply (hb_would_apply_context_t *c, ContextApplyLookupContext &lookup_context) const
1142 TRACE_WOULD_APPLY (this);
1143 const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (inputZ, inputZ[0].static_size * (inputCount ? inputCount - 1 : 0));
1144 return_trace (context_would_apply_lookup (c, inputCount, inputZ, lookupCount, lookupRecord, lookup_context));
1147 inline bool apply (hb_apply_context_t *c, ContextApplyLookupContext &lookup_context) const
1150 const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (inputZ, inputZ[0].static_size * (inputCount ? inputCount - 1 : 0));
1151 return_trace (context_apply_lookup (c, inputCount, inputZ, lookupCount, lookupRecord, lookup_context));
/* Sanitize: check the counts, then range-check the two trailing variable
 * arrays in one shot (inputs + lookup records). */
1155 inline bool sanitize (hb_sanitize_context_t *c) const
1157 TRACE_SANITIZE (this);
1158 return inputCount.sanitize (c)
1159 && lookupCount.sanitize (c)
1160 && c->check_range (inputZ,
1161 inputZ[0].static_size * inputCount
1162 + lookupRecordX[0].static_size * lookupCount);
1166 USHORT inputCount; /* Total number of glyphs in input
1167 * glyph sequence--includes the first
1169 USHORT lookupCount; /* Number of LookupRecords */
1170 USHORT inputZ[VAR]; /* Array of match inputs--start with
1172 LookupRecord lookupRecordX[VAR]; /* Array of LookupRecords--in
1175 DEFINE_SIZE_ARRAY2 (4, inputZ, lookupRecordX);
/* RuleSet (struct header elided from this excerpt): an offset-array of Rule
 * tables.  closure/collect_glyphs visit every rule; would_apply/apply stop
 * at the first rule that matches (rules are ordered by preference). */
1180 inline void closure (hb_closure_context_t *c, ContextClosureLookupContext &lookup_context) const
1182 TRACE_CLOSURE (this);
1183 unsigned int num_rules = rule.len;
1184 for (unsigned int i = 0; i < num_rules; i++)
1185 (this+rule[i]).closure (c, lookup_context);
1188 inline void collect_glyphs (hb_collect_glyphs_context_t *c, ContextCollectGlyphsLookupContext &lookup_context) const
1190 TRACE_COLLECT_GLYPHS (this);
1191 unsigned int num_rules = rule.len;
1192 for (unsigned int i = 0; i < num_rules; i++)
1193 (this+rule[i]).collect_glyphs (c, lookup_context);
1196 inline bool would_apply (hb_would_apply_context_t *c, ContextApplyLookupContext &lookup_context) const
1198 TRACE_WOULD_APPLY (this);
1199 unsigned int num_rules = rule.len;
1200 for (unsigned int i = 0; i < num_rules; i++)
1202 if ((this+rule[i]).would_apply (c, lookup_context))
1203 return_trace (true);
1205 return_trace (false);
1208 inline bool apply (hb_apply_context_t *c, ContextApplyLookupContext &lookup_context) const
1211 unsigned int num_rules = rule.len;
1212 for (unsigned int i = 0; i < num_rules; i++)
1214 if ((this+rule[i]).apply (c, lookup_context))
1215 return_trace (true);
1217 return_trace (false);
1220 inline bool sanitize (hb_sanitize_context_t *c) const
1222 TRACE_SANITIZE (this);
1223 return_trace (rule.sanitize (c, this));
1228 rule; /* Array of Rule tables
1229 * ordered by preference */
1231 DEFINE_SIZE_ARRAY (2, rule);
/* ContextFormat1: simple glyph-context subtable -- one RuleSet per covered
 * glyph, indexed by Coverage index.  (Several initializer interiors are
 * elided in this excerpt.) */
1235 struct ContextFormat1
1237 inline void closure (hb_closure_context_t *c) const
1239 TRACE_CLOSURE (this);
1241 const Coverage &cov = (this+coverage);
1243 struct ContextClosureLookupContext lookup_context = {
/* Only recurse into rule sets whose covered glyph is in the closure set. */
1248 unsigned int count = ruleSet.len;
1249 for (unsigned int i = 0; i < count; i++)
1250 if (cov.intersects_coverage (c->glyphs, i)) {
1251 const RuleSet &rule_set = this+ruleSet[i];
1252 rule_set.closure (c, lookup_context);
1256 inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
1258 TRACE_COLLECT_GLYPHS (this);
1259 (this+coverage).add_coverage (c->input);
1261 struct ContextCollectGlyphsLookupContext lookup_context = {
1266 unsigned int count = ruleSet.len;
1267 for (unsigned int i = 0; i < count; i++)
1268 (this+ruleSet[i]).collect_glyphs (c, lookup_context);
1271 inline bool would_apply (hb_would_apply_context_t *c) const
1273 TRACE_WOULD_APPLY (this);
1275 const RuleSet &rule_set = this+ruleSet[(this+coverage).get_coverage (c->glyphs[0])];
1276 struct ContextApplyLookupContext lookup_context = {
1280 return_trace (rule_set.would_apply (c, lookup_context));
1283 inline const Coverage &get_coverage (void) const
1285 return this+coverage;
1288 inline bool apply (hb_apply_context_t *c) const
1291 unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
/* likely(): most glyphs are NOT covered, so the early-out is the hot path. */
1292 if (likely (index == NOT_COVERED))
1293 return_trace (false);
1295 const RuleSet &rule_set = this+ruleSet[index];
1296 struct ContextApplyLookupContext lookup_context = {
1300 return_trace (rule_set.apply (c, lookup_context));
1303 inline bool sanitize (hb_sanitize_context_t *c) const
1305 TRACE_SANITIZE (this);
1306 return_trace (coverage.sanitize (c, this) && ruleSet.sanitize (c, this));
1310 USHORT format; /* Format identifier--format = 1 */
1312 coverage; /* Offset to Coverage table--from
1313 * beginning of table */
1314 OffsetArrayOf<RuleSet>
1315 ruleSet; /* Array of RuleSet tables
1316 * ordered by Coverage Index */
1318 DEFINE_SIZE_ARRAY (6, ruleSet);
/* ContextFormat2: class-based context subtable -- glyphs are mapped through
 * a ClassDef and one RuleSet exists per input class. */
1322 struct ContextFormat2
1324 inline void closure (hb_closure_context_t *c) const
1326 TRACE_CLOSURE (this);
/* Cheap reject: nothing to do if no covered glyph is in the closure set. */
1327 if (!(this+coverage).intersects (c->glyphs))
1330 const ClassDef &class_def = this+classDef;
1332 struct ContextClosureLookupContext lookup_context = {
1337 unsigned int count = ruleSet.len;
1338 for (unsigned int i = 0; i < count; i++)
1339 if (class_def.intersects_class (c->glyphs, i)) {
1340 const RuleSet &rule_set = this+ruleSet[i];
1341 rule_set.closure (c, lookup_context);
1345 inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
1347 TRACE_COLLECT_GLYPHS (this);
1348 (this+coverage).add_coverage (c->input);
1350 const ClassDef &class_def = this+classDef;
1351 struct ContextCollectGlyphsLookupContext lookup_context = {
1356 unsigned int count = ruleSet.len;
1357 for (unsigned int i = 0; i < count; i++)
1358 (this+ruleSet[i]).collect_glyphs (c, lookup_context);
1361 inline bool would_apply (hb_would_apply_context_t *c) const
1363 TRACE_WOULD_APPLY (this);
1365 const ClassDef &class_def = this+classDef;
1366 unsigned int index = class_def.get_class (c->glyphs[0]);
1367 const RuleSet &rule_set = this+ruleSet[index];
1368 struct ContextApplyLookupContext lookup_context = {
1372 return_trace (rule_set.would_apply (c, lookup_context));
1375 inline const Coverage &get_coverage (void) const
1377 return this+coverage;
1380 inline bool apply (hb_apply_context_t *c) const
1383 unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
1384 if (likely (index == NOT_COVERED)) return_trace (false);
/* Coverage gated; now reuse `index` as the current glyph's class. */
1386 const ClassDef &class_def = this+classDef;
1387 index = class_def.get_class (c->buffer->cur().codepoint);
1388 const RuleSet &rule_set = this+ruleSet[index];
1389 struct ContextApplyLookupContext lookup_context = {
1393 return_trace (rule_set.apply (c, lookup_context));
1396 inline bool sanitize (hb_sanitize_context_t *c) const
1398 TRACE_SANITIZE (this);
1399 return_trace (coverage.sanitize (c, this) && classDef.sanitize (c, this) && ruleSet.sanitize (c, this));
1403 USHORT format; /* Format identifier--format = 2 */
1405 coverage; /* Offset to Coverage table--from
1406 * beginning of table */
1408 classDef; /* Offset to glyph ClassDef table--from
1409 * beginning of table */
1410 OffsetArrayOf<RuleSet>
1411 ruleSet; /* Array of RuleSet tables
1412 * ordered by class */
1414 DEFINE_SIZE_ARRAY (8, ruleSet);
/* ContextFormat3: coverage-based context subtable -- a single rule given as
 * glyphCount Coverage offsets (coverageZ) followed by the LookupRecord
 * array, located via StructAtOffset past the coverage offsets. */
1418 struct ContextFormat3
1420 inline void closure (hb_closure_context_t *c) const
1422 TRACE_CLOSURE (this);
1424 if (!(this+coverageZ[0]).intersects (c->glyphs))
1426 const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverageZ, coverageZ[0].static_size * glyphCount);
1427 struct ContextClosureLookupContext lookup_context = {
1428 {intersects_coverage},
/* coverageZ + 1: first coverage is the "current glyph" slot; the helper
 * takes the remaining glyphCount-1 entries as the input array. */
1431 context_closure_lookup (c,
1432 glyphCount, (const USHORT *) (coverageZ + 1),
1433 lookupCount, lookupRecord,
1437 inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
1439 TRACE_COLLECT_GLYPHS (this);
1440 (this+coverageZ[0]).add_coverage (c->input);
1442 const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverageZ, coverageZ[0].static_size * glyphCount);
1443 struct ContextCollectGlyphsLookupContext lookup_context = {
1448 context_collect_glyphs_lookup (c,
1449 glyphCount, (const USHORT *) (coverageZ + 1),
1450 lookupCount, lookupRecord,
1454 inline bool would_apply (hb_would_apply_context_t *c) const
1456 TRACE_WOULD_APPLY (this);
1458 const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverageZ, coverageZ[0].static_size * glyphCount);
1459 struct ContextApplyLookupContext lookup_context = {
1463 return_trace (context_would_apply_lookup (c, glyphCount, (const USHORT *) (coverageZ + 1), lookupCount, lookupRecord, lookup_context));
1466 inline const Coverage &get_coverage (void) const
1468 return this+coverageZ[0];
1471 inline bool apply (hb_apply_context_t *c) const
1474 unsigned int index = (this+coverageZ[0]).get_coverage (c->buffer->cur().codepoint);
1475 if (likely (index == NOT_COVERED)) return_trace (false);
1477 const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverageZ, coverageZ[0].static_size * glyphCount);
1478 struct ContextApplyLookupContext lookup_context = {
1482 return_trace (context_apply_lookup (c, glyphCount, (const USHORT *) (coverageZ + 1), lookupCount, lookupRecord, lookup_context));
1485 inline bool sanitize (hb_sanitize_context_t *c) const
1487 TRACE_SANITIZE (this);
1488 if (!c->check_struct (this)) return_trace (false);
1489 unsigned int count = glyphCount;
1490 if (!count) return_trace (false); /* We want to access coverageZ[0] freely. */
1491 if (!c->check_array (coverageZ, coverageZ[0].static_size, count)) return_trace (false);
1492 for (unsigned int i = 0; i < count; i++)
1493 if (!coverageZ[i].sanitize (c, this)) return_trace (false);
1494 const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverageZ, coverageZ[0].static_size * count);
1495 return_trace (c->check_array (lookupRecord, lookupRecord[0].static_size, lookupCount));
1499 USHORT format; /* Format identifier--format = 3 */
1500 USHORT glyphCount; /* Number of glyphs in the input glyph
1502 USHORT lookupCount; /* Number of LookupRecords */
1504 coverageZ[VAR]; /* Array of offsets to Coverage
1505 * table in glyph sequence order */
1506 LookupRecord lookupRecordX[VAR]; /* Array of LookupRecords--in
1509 DEFINE_SIZE_ARRAY2 (6, coverageZ, lookupRecordX);
/* Context (struct header elided): format-dispatching wrapper.  may_dispatch
 * sanitizes the format field before the switch reads it; unknown formats
 * fall through to the context's default return value. */
1514 template <typename context_t>
1515 inline typename context_t::return_t dispatch (context_t *c) const
1517 TRACE_DISPATCH (this, u.format);
1518 if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
1520 case 1: return_trace (c->dispatch (u.format1));
1521 case 2: return_trace (c->dispatch (u.format2));
1522 case 3: return_trace (c->dispatch (u.format3));
1523 default:return_trace (c->default_return_value ());
/* Union of the three subtable formats, discriminated by `format`. */
1529 USHORT format; /* Format identifier */
1530 ContextFormat1 format1;
1531 ContextFormat2 format2;
1532 ContextFormat3 format3;
1537 /* Chaining Contextual lookups */
/* Chaining variants of the lookup-context PODs: same callbacks as the
 * plain-context versions, but three data pointers, indexed
 * [0]=backtrack, [1]=input, [2]=lookahead. */
1539 struct ChainContextClosureLookupContext
1541 ContextClosureFuncs funcs;
1542 const void *intersects_data[3];
1545 struct ChainContextCollectGlyphsLookupContext
1547 ContextCollectGlyphsFuncs funcs;
1548 const void *collect_data[3];
1551 struct ChainContextApplyLookupContext
1553 ContextApplyFuncs funcs;
1554 const void *match_data[3];
/* Glyph-closure for one chaining rule: all three sequences (backtrack,
 * input minus the implicit first glyph, lookahead) must intersect the
 * current glyph set before the rule's lookups are recursed into (the
 * recursion call head is elided in this excerpt). */
1557 static inline void chain_context_closure_lookup (hb_closure_context_t *c,
1558 unsigned int backtrackCount,
1559 const USHORT backtrack[],
1560 unsigned int inputCount, /* Including the first glyph (not matched) */
1561 const USHORT input[], /* Array of input values--start with second glyph */
1562 unsigned int lookaheadCount,
1563 const USHORT lookahead[],
1564 unsigned int lookupCount,
1565 const LookupRecord lookupRecord[],
1566 ChainContextClosureLookupContext &lookup_context)
1568 if (intersects_array (c,
1569 backtrackCount, backtrack,
1570 lookup_context.funcs.intersects, lookup_context.intersects_data[0])
1571 && intersects_array (c,
1572 inputCount ? inputCount - 1 : 0, input,
1573 lookup_context.funcs.intersects, lookup_context.intersects_data[1])
1574 && intersects_array (c,
1575 lookaheadCount, lookahead,
1576 lookup_context.funcs.intersects, lookup_context.intersects_data[2]))
1578 lookupCount, lookupRecord);
/* Collect-glyphs for one chaining rule: backtrack values feed c->before,
 * input values feed c->input, lookahead values feed c->after; then (elided
 * here) the rule's lookups are recursed into. */
1581 static inline void chain_context_collect_glyphs_lookup (hb_collect_glyphs_context_t *c,
1582 unsigned int backtrackCount,
1583 const USHORT backtrack[],
1584 unsigned int inputCount, /* Including the first glyph (not matched) */
1585 const USHORT input[], /* Array of input values--start with second glyph */
1586 unsigned int lookaheadCount,
1587 const USHORT lookahead[],
1588 unsigned int lookupCount,
1589 const LookupRecord lookupRecord[],
1590 ChainContextCollectGlyphsLookupContext &lookup_context)
1592 collect_array (c, c->before,
1593 backtrackCount, backtrack,
1594 lookup_context.funcs.collect, lookup_context.collect_data[0]);
1595 collect_array (c, c->input,
1596 inputCount ? inputCount - 1 : 0, input,
1597 lookup_context.funcs.collect, lookup_context.collect_data[1]);
1598 collect_array (c, c->after,
1599 lookaheadCount, lookahead,
1600 lookup_context.funcs.collect, lookup_context.collect_data[2]);
1602 lookupCount, lookupRecord);
/* Would-apply check for one chaining rule.  Only the input sequence is
 * matched; backtrack/lookahead are consulted only as counts: under
 * zero_context the rule must have no context requirement at all. */
1605 static inline bool chain_context_would_apply_lookup (hb_would_apply_context_t *c,
1606 unsigned int backtrackCount,
1607 const USHORT backtrack[] HB_UNUSED,
1608 unsigned int inputCount, /* Including the first glyph (not matched) */
1609 const USHORT input[], /* Array of input values--start with second glyph */
1610 unsigned int lookaheadCount,
1611 const USHORT lookahead[] HB_UNUSED,
1612 unsigned int lookupCount HB_UNUSED,
1613 const LookupRecord lookupRecord[] HB_UNUSED,
1614 ChainContextApplyLookupContext &lookup_context)
1616 return (c->zero_context ? !backtrackCount && !lookaheadCount : true)
1617 && would_match_input (c,
1619 lookup_context.funcs.match, lookup_context.match_data[1]);
/* Apply one chaining rule: match input (recording positions), then
 * backtrack, then lookahead; on full success apply the lookup records at
 * the matched input positions (apply_lookup call head elided here).
 * NOTE(review): input is matched first even though it sits between
 * backtrack and lookahead in the data. */
1622 static inline bool chain_context_apply_lookup (hb_apply_context_t *c,
1623 unsigned int backtrackCount,
1624 const USHORT backtrack[],
1625 unsigned int inputCount, /* Including the first glyph (not matched) */
1626 const USHORT input[], /* Array of input values--start with second glyph */
1627 unsigned int lookaheadCount,
1628 const USHORT lookahead[],
1629 unsigned int lookupCount,
1630 const LookupRecord lookupRecord[],
1631 ChainContextApplyLookupContext &lookup_context)
1633 unsigned int match_length = 0;
1634 unsigned int match_positions[HB_MAX_CONTEXT_LENGTH];
1635 return match_input (c,
1637 lookup_context.funcs.match, lookup_context.match_data[1],
1638 &match_length, match_positions)
1639 && match_backtrack (c,
1640 backtrackCount, backtrack,
1641 lookup_context.funcs.match, lookup_context.match_data[0])
1642 && match_lookahead (c,
1643 lookaheadCount, lookahead,
1644 lookup_context.funcs.match, lookup_context.match_data[2],
1647 inputCount, match_positions,
1648 lookupCount, lookupRecord,
/* ChainRule (struct header elided): a single chaining rule serialized as
 * four consecutive variable-length arrays -- backtrack, input (headless:
 * first glyph implicit), lookahead, lookup records.  Only `backtrack` is a
 * real member; the rest are located with StructAfter, so they must be
 * walked (and sanitized) strictly in order. */
1654 inline void closure (hb_closure_context_t *c, ChainContextClosureLookupContext &lookup_context) const
1656 TRACE_CLOSURE (this);
1657 const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
1658 const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
1659 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
1660 chain_context_closure_lookup (c,
1661 backtrack.len, backtrack.array,
1662 input.len, input.array,
1663 lookahead.len, lookahead.array,
1664 lookup.len, lookup.array,
1668 inline void collect_glyphs (hb_collect_glyphs_context_t *c, ChainContextCollectGlyphsLookupContext &lookup_context) const
1670 TRACE_COLLECT_GLYPHS (this);
1671 const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
1672 const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
1673 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
1674 chain_context_collect_glyphs_lookup (c,
1675 backtrack.len, backtrack.array,
1676 input.len, input.array,
1677 lookahead.len, lookahead.array,
1678 lookup.len, lookup.array,
1682 inline bool would_apply (hb_would_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
1684 TRACE_WOULD_APPLY (this);
1685 const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
1686 const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
1687 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
1688 return_trace (chain_context_would_apply_lookup (c,
1689 backtrack.len, backtrack.array,
1690 input.len, input.array,
1691 lookahead.len, lookahead.array, lookup.len,
1692 lookup.array, lookup_context));
1695 inline bool apply (hb_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
1698 const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
1699 const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
1700 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
1701 return_trace (chain_context_apply_lookup (c,
1702 backtrack.len, backtrack.array,
1703 input.len, input.array,
1704 lookahead.len, lookahead.array, lookup.len,
1705 lookup.array, lookup_context));
/* Sanitize in layout order: each array must be validated before StructAfter
 * can safely step past it to the next one. */
1708 inline bool sanitize (hb_sanitize_context_t *c) const
1710 TRACE_SANITIZE (this);
1711 if (!backtrack.sanitize (c)) return_trace (false);
1712 const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
1713 if (!input.sanitize (c)) return_trace (false);
1714 const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
1715 if (!lookahead.sanitize (c)) return_trace (false);
1716 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
1717 return_trace (lookup.sanitize (c));
1722 backtrack; /* Array of backtracking values
1723 * (to be matched before the input
1725 HeadlessArrayOf<USHORT>
1726 inputX; /* Array of input values (start with
1729 lookaheadX; /* Array of lookahead values's (to be
1730 * matched after the input sequence) */
1731 ArrayOf<LookupRecord>
1732 lookupX; /* Array of LookupRecords--in
1735 DEFINE_SIZE_MIN (8);
/* ChainRuleSet (struct header elided): offset-array of ChainRule tables.
 * closure/collect_glyphs visit all rules; would_apply/apply short-circuit
 * on the first matching rule (ordered by preference). */
1740 inline void closure (hb_closure_context_t *c, ChainContextClosureLookupContext &lookup_context) const
1742 TRACE_CLOSURE (this);
1743 unsigned int num_rules = rule.len;
1744 for (unsigned int i = 0; i < num_rules; i++)
1745 (this+rule[i]).closure (c, lookup_context);
1748 inline void collect_glyphs (hb_collect_glyphs_context_t *c, ChainContextCollectGlyphsLookupContext &lookup_context) const
1750 TRACE_COLLECT_GLYPHS (this);
1751 unsigned int num_rules = rule.len;
1752 for (unsigned int i = 0; i < num_rules; i++)
1753 (this+rule[i]).collect_glyphs (c, lookup_context);
1756 inline bool would_apply (hb_would_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
1758 TRACE_WOULD_APPLY (this);
1759 unsigned int num_rules = rule.len;
1760 for (unsigned int i = 0; i < num_rules; i++)
1761 if ((this+rule[i]).would_apply (c, lookup_context))
1762 return_trace (true);
1764 return_trace (false);
1767 inline bool apply (hb_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
1770 unsigned int num_rules = rule.len;
1771 for (unsigned int i = 0; i < num_rules; i++)
1772 if ((this+rule[i]).apply (c, lookup_context))
1773 return_trace (true);
1775 return_trace (false);
1778 inline bool sanitize (hb_sanitize_context_t *c) const
1780 TRACE_SANITIZE (this);
1781 return_trace (rule.sanitize (c, this));
1785 OffsetArrayOf<ChainRule>
1786 rule; /* Array of ChainRule tables
1787 * ordered by preference */
1789 DEFINE_SIZE_ARRAY (2, rule);
/* ChainContextFormat1: simple glyph-based chaining context -- one
 * ChainRuleSet per covered glyph, indexed by Coverage index. */
1792 struct ChainContextFormat1
1794 inline void closure (hb_closure_context_t *c) const
1796 TRACE_CLOSURE (this);
1797 const Coverage &cov = (this+coverage);
1799 struct ChainContextClosureLookupContext lookup_context = {
1804 unsigned int count = ruleSet.len;
1805 for (unsigned int i = 0; i < count; i++)
1806 if (cov.intersects_coverage (c->glyphs, i)) {
1807 const ChainRuleSet &rule_set = this+ruleSet[i];
1808 rule_set.closure (c, lookup_context);
1812 inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
1814 TRACE_COLLECT_GLYPHS (this);
1815 (this+coverage).add_coverage (c->input);
1817 struct ChainContextCollectGlyphsLookupContext lookup_context = {
1822 unsigned int count = ruleSet.len;
1823 for (unsigned int i = 0; i < count; i++)
1824 (this+ruleSet[i]).collect_glyphs (c, lookup_context);
1827 inline bool would_apply (hb_would_apply_context_t *c) const
1829 TRACE_WOULD_APPLY (this);
1831 const ChainRuleSet &rule_set = this+ruleSet[(this+coverage).get_coverage (c->glyphs[0])];
1832 struct ChainContextApplyLookupContext lookup_context = {
1836 return_trace (rule_set.would_apply (c, lookup_context));
1839 inline const Coverage &get_coverage (void) const
1841 return this+coverage;
1844 inline bool apply (hb_apply_context_t *c) const
1847 unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
/* likely(): the common case is the current glyph not being covered. */
1848 if (likely (index == NOT_COVERED)) return_trace (false);
1850 const ChainRuleSet &rule_set = this+ruleSet[index];
1851 struct ChainContextApplyLookupContext lookup_context = {
1855 return_trace (rule_set.apply (c, lookup_context));
1858 inline bool sanitize (hb_sanitize_context_t *c) const
1860 TRACE_SANITIZE (this);
1861 return_trace (coverage.sanitize (c, this) && ruleSet.sanitize (c, this));
1865 USHORT format; /* Format identifier--format = 1 */
1867 coverage; /* Offset to Coverage table--from
1868 * beginning of table */
1869 OffsetArrayOf<ChainRuleSet>
1870 ruleSet; /* Array of ChainRuleSet tables
1871 * ordered by Coverage Index */
1873 DEFINE_SIZE_ARRAY (6, ruleSet);
/* ChainContextFormat2: class-based chaining context with three separate
 * ClassDefs (backtrack / input / lookahead); one ChainRuleSet per input
 * class.  The three class-def pointers are passed to the rules in the
 * [backtrack, input, lookahead] slots of the lookup context. */
1876 struct ChainContextFormat2
1878 inline void closure (hb_closure_context_t *c) const
1880 TRACE_CLOSURE (this);
1881 if (!(this+coverage).intersects (c->glyphs))
1884 const ClassDef &backtrack_class_def = this+backtrackClassDef;
1885 const ClassDef &input_class_def = this+inputClassDef;
1886 const ClassDef &lookahead_class_def = this+lookaheadClassDef;
1888 struct ChainContextClosureLookupContext lookup_context = {
1890 {&backtrack_class_def,
1892 &lookahead_class_def}
1895 unsigned int count = ruleSet.len;
1896 for (unsigned int i = 0; i < count; i++)
1897 if (input_class_def.intersects_class (c->glyphs, i)) {
1898 const ChainRuleSet &rule_set = this+ruleSet[i];
1899 rule_set.closure (c, lookup_context);
1903 inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
1905 TRACE_COLLECT_GLYPHS (this);
1906 (this+coverage).add_coverage (c->input);
1908 const ClassDef &backtrack_class_def = this+backtrackClassDef;
1909 const ClassDef &input_class_def = this+inputClassDef;
1910 const ClassDef &lookahead_class_def = this+lookaheadClassDef;
1912 struct ChainContextCollectGlyphsLookupContext lookup_context = {
1914 {&backtrack_class_def,
1916 &lookahead_class_def}
1919 unsigned int count = ruleSet.len;
1920 for (unsigned int i = 0; i < count; i++)
1921 (this+ruleSet[i]).collect_glyphs (c, lookup_context);
1924 inline bool would_apply (hb_would_apply_context_t *c) const
1926 TRACE_WOULD_APPLY (this);
1928 const ClassDef &backtrack_class_def = this+backtrackClassDef;
1929 const ClassDef &input_class_def = this+inputClassDef;
1930 const ClassDef &lookahead_class_def = this+lookaheadClassDef;
1932 unsigned int index = input_class_def.get_class (c->glyphs[0]);
1933 const ChainRuleSet &rule_set = this+ruleSet[index];
1934 struct ChainContextApplyLookupContext lookup_context = {
1936 {&backtrack_class_def,
1938 &lookahead_class_def}
1940 return_trace (rule_set.would_apply (c, lookup_context));
1943 inline const Coverage &get_coverage (void) const
1945 return this+coverage;
1948 inline bool apply (hb_apply_context_t *c) const
1951 unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
1952 if (likely (index == NOT_COVERED)) return_trace (false);
1954 const ClassDef &backtrack_class_def = this+backtrackClassDef;
1955 const ClassDef &input_class_def = this+inputClassDef;
1956 const ClassDef &lookahead_class_def = this+lookaheadClassDef;
/* Reuse `index` as the current glyph's input class after the coverage gate. */
1958 index = input_class_def.get_class (c->buffer->cur().codepoint);
1959 const ChainRuleSet &rule_set = this+ruleSet[index];
1960 struct ChainContextApplyLookupContext lookup_context = {
1962 {&backtrack_class_def,
1964 &lookahead_class_def}
1966 return_trace (rule_set.apply (c, lookup_context));
1969 inline bool sanitize (hb_sanitize_context_t *c) const
1971 TRACE_SANITIZE (this);
1972 return_trace (coverage.sanitize (c, this) &&
1973 backtrackClassDef.sanitize (c, this) &&
1974 inputClassDef.sanitize (c, this) &&
1975 lookaheadClassDef.sanitize (c, this) &&
1976 ruleSet.sanitize (c, this));
1980 USHORT format; /* Format identifier--format = 2 */
1982 coverage; /* Offset to Coverage table--from
1983 * beginning of table */
1985 backtrackClassDef; /* Offset to glyph ClassDef table
1986 * containing backtrack sequence
1987 * data--from beginning of table */
1989 inputClassDef; /* Offset to glyph ClassDef
1990 * table containing input sequence
1991 * data--from beginning of table */
1993 lookaheadClassDef; /* Offset to glyph ClassDef table
1994 * containing lookahead sequence
1995 * data--from beginning of table */
1996 OffsetArrayOf<ChainRuleSet>
1997 ruleSet; /* Array of ChainRuleSet tables
1998 * ordered by class */
2000 DEFINE_SIZE_ARRAY (12, ruleSet);
/* ChainContextFormat3: coverage-based chaining context -- a single rule
 * given as three consecutive OffsetArrayOf<Coverage> sequences plus the
 * LookupRecord array, all located with StructAfter past `backtrack`.
 * input[0] is the coverage of the current glyph, so the helpers are passed
 * input.array + 1 for the remaining positions. */
2003 struct ChainContextFormat3
2005 inline void closure (hb_closure_context_t *c) const
2007 TRACE_CLOSURE (this);
2008 const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
2010 if (!(this+input[0]).intersects (c->glyphs))
2013 const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
2014 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
2015 struct ChainContextClosureLookupContext lookup_context = {
2016 {intersects_coverage},
2019 chain_context_closure_lookup (c,
2020 backtrack.len, (const USHORT *) backtrack.array,
2021 input.len, (const USHORT *) input.array + 1,
2022 lookahead.len, (const USHORT *) lookahead.array,
2023 lookup.len, lookup.array,
2027 inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
2029 TRACE_COLLECT_GLYPHS (this);
2030 const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
2032 (this+input[0]).add_coverage (c->input);
2034 const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
2035 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
2036 struct ChainContextCollectGlyphsLookupContext lookup_context = {
2040 chain_context_collect_glyphs_lookup (c,
2041 backtrack.len, (const USHORT *) backtrack.array,
2042 input.len, (const USHORT *) input.array + 1,
2043 lookahead.len, (const USHORT *) lookahead.array,
2044 lookup.len, lookup.array,
2048 inline bool would_apply (hb_would_apply_context_t *c) const
2050 TRACE_WOULD_APPLY (this);
2052 const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
2053 const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
2054 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
2055 struct ChainContextApplyLookupContext lookup_context = {
2059 return_trace (chain_context_would_apply_lookup (c,
2060 backtrack.len, (const USHORT *) backtrack.array,
2061 input.len, (const USHORT *) input.array + 1,
2062 lookahead.len, (const USHORT *) lookahead.array,
2063 lookup.len, lookup.array, lookup_context));
2066 inline const Coverage &get_coverage (void) const
2068 const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
2069 return this+input[0];
2072 inline bool apply (hb_apply_context_t *c) const
2075 const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
2077 unsigned int index = (this+input[0]).get_coverage (c->buffer->cur().codepoint);
2078 if (likely (index == NOT_COVERED)) return_trace (false);
2080 const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
2081 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
2082 struct ChainContextApplyLookupContext lookup_context = {
2086 return_trace (chain_context_apply_lookup (c,
2087 backtrack.len, (const USHORT *) backtrack.array,
2088 input.len, (const USHORT *) input.array + 1,
2089 lookahead.len, (const USHORT *) lookahead.array,
2090 lookup.len, lookup.array, lookup_context));
/* Sanitize strictly in layout order; an empty input array is rejected so
 * input[0] stays accessible everywhere else. */
2093 inline bool sanitize (hb_sanitize_context_t *c) const
2095 TRACE_SANITIZE (this);
2096 if (!backtrack.sanitize (c, this)) return_trace (false);
2097 const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
2098 if (!input.sanitize (c, this)) return_trace (false);
2099 if (!input.len) return_trace (false); /* To be consistent with Context. */
2100 const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
2101 if (!lookahead.sanitize (c, this)) return_trace (false);
2102 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
2103 return_trace (lookup.sanitize (c));
2107 USHORT format; /* Format identifier--format = 3 */
2108 OffsetArrayOf<Coverage>
2109 backtrack; /* Array of coverage tables
2110 * in backtracking sequence, in glyph
2112 OffsetArrayOf<Coverage>
2113 inputX ; /* Array of coverage
2114 * tables in input sequence, in glyph
2116 OffsetArrayOf<Coverage>
2117 lookaheadX; /* Array of coverage tables
2118 * in lookahead sequence, in glyph
2120 ArrayOf<LookupRecord>
2121 lookupX; /* Array of LookupRecords--in
2124 DEFINE_SIZE_MIN (10);
/* ChainContext (struct header elided): format-dispatching wrapper mirroring
 * Context::dispatch -- may_dispatch sanitizes the format field before the
 * switch; unknown formats yield the context's default return value. */
2129 template <typename context_t>
2130 inline typename context_t::return_t dispatch (context_t *c) const
2132 TRACE_DISPATCH (this, u.format);
2133 if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
2135 case 1: return_trace (c->dispatch (u.format1));
2136 case 2: return_trace (c->dispatch (u.format2));
2137 case 3: return_trace (c->dispatch (u.format3));
2138 default:return_trace (c->default_return_value ());
/* Union of the three chaining subtable formats, discriminated by `format`. */
2144 USHORT format; /* Format identifier */
2145 ChainContextFormat1 format1;
2146 ChainContextFormat2 format2;
2147 ChainContextFormat3 format3;
/* ExtensionFormat1<T>: indirection subtable -- holds the real lookup type
 * plus a 32-bit offset to the actual subtable, letting lookups live beyond
 * 16-bit offset range.  T supplies the LookupSubTable type. */
2152 template <typename T>
2153 struct ExtensionFormat1
2155 inline unsigned int get_type (void) const { return extensionLookupType; }
/* Resolve the wrapped subtable; a zero offset yields the Null object
 * rather than `this` (offset 0 would alias the extension itself). */
2157 template <typename X>
2158 inline const X& get_subtable (void) const
2160 unsigned int offset = extensionOffset;
2161 if (unlikely (!offset)) return Null(typename T::LookupSubTable);
2162 return StructAtOffset<typename T::LookupSubTable> (this, offset);
2165 template <typename context_t>
2166 inline typename context_t::return_t dispatch (context_t *c) const
2168 TRACE_DISPATCH (this, format);
2169 if (unlikely (!c->may_dispatch (this, this))) return_trace (c->no_dispatch_return_value ());
2170 return_trace (get_subtable<typename T::LookupSubTable> ().dispatch (c, get_type ()));
2173 /* This is called from may_dispatch() above with hb_sanitize_context_t. */
2174 inline bool sanitize (hb_sanitize_context_t *c) const
2176 TRACE_SANITIZE (this);
2177 return_trace (c->check_struct (this) && extensionOffset != 0);
2181 USHORT format; /* Format identifier. Set to 1. */
2182 USHORT extensionLookupType; /* Lookup type of subtable referenced
2183 * by ExtensionOffset (i.e. the
2184 * extension subtable). */
2185 ULONG extensionOffset; /* Offset to the extension subtable,
2186 * of lookup type subtable. */
2188 DEFINE_SIZE_STATIC (8);
/* Interior of Extension<T> (the `struct Extension` line itself is not
 * visible in this listing).  get_type: forward to the active format's
 * stored lookup type.
 * NOTE(review): the `switch (u.format)` line and default case are missing
 * from this listing — confirm against the upstream file. */
template <typename T>
inline unsigned int get_type (void) const
case 1: return u.format1.get_type ();
/* Resolve the wrapped subtable for the active format; unknown formats
 * fall back to the shared Null object.
 * NOTE(review): the `switch (u.format)` line is missing from this listing. */
template <typename X>
inline const X& get_subtable (void) const
/* `template` disambiguator needed: format1 is of dependent type. */
case 1: return u.format1.template get_subtable<typename T::LookupSubTable> ();
default:return Null(typename T::LookupSubTable);
/* Format dispatcher for Extension lookups; only format 1 is defined, other
 * formats are skipped via the context's default return value.
 * NOTE(review): this listing is missing the opening brace and the
 * `switch (u.format)` line between the may_dispatch check and the cases. */
template <typename context_t>
inline typename context_t::return_t dispatch (context_t *c) const
TRACE_DISPATCH (this, u.format);
if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
case 1: return_trace (u.format1.dispatch (c));
default:return_trace (c->default_return_value ());
/* Discriminated union: leading USHORT `format` selects the member. */
USHORT			format;		/* Format identifier */
ExtensionFormat1<T>	format1;
/* Table tags for the two OpenType layout tables this common base serves. */
static const hb_tag_t GSUBTag	= HB_OT_TAG_GSUB;
static const hb_tag_t GPOSTag	= HB_OT_TAG_GPOS;
/* Script accessors.  All script data lives behind the scriptList offset;
 * (this+scriptList) resolves the OffsetTo<> relative to the table start. */
inline unsigned int get_script_count (void) const
{ return (this+scriptList).len; }
inline const Tag& get_script_tag (unsigned int i) const
{ return (this+scriptList).get_tag (i); }
/* Paged tag retrieval: script_count is IN (capacity) / OUT (written). */
inline unsigned int get_script_tags (unsigned int start_offset,
				     unsigned int *script_count /* IN/OUT */,
				     hb_tag_t     *script_tags /* OUT */) const
{ return (this+scriptList).get_tags (start_offset, script_count, script_tags); }
inline const Script& get_script (unsigned int i) const
{ return (this+scriptList)[i]; }
/* Binary/linear lookup by tag; returns false (index untouched on the
 * underlying find_index's terms) when the tag is absent. */
inline bool find_script_index (hb_tag_t tag, unsigned int *index) const
{ return (this+scriptList).find_index (tag, index); }
/* Feature accessors, mirroring the script accessors above. */
inline unsigned int get_feature_count (void) const
{ return (this+featureList).len; }
/* NOT_FOUND_INDEX maps to HB_TAG_NONE so a failed find_feature_index()
 * result can be passed straight in without a caller-side check. */
inline hb_tag_t get_feature_tag (unsigned int i) const
{ return i == Index::NOT_FOUND_INDEX ? HB_TAG_NONE : (this+featureList).get_tag (i); }
inline unsigned int get_feature_tags (unsigned int start_offset,
				      unsigned int *feature_count /* IN/OUT */,
				      hb_tag_t     *feature_tags /* OUT */) const
{ return (this+featureList).get_tags (start_offset, feature_count, feature_tags); }
inline const Feature& get_feature (unsigned int i) const
{ return (this+featureList)[i]; }
inline bool find_feature_index (hb_tag_t tag, unsigned int *index) const
{ return (this+featureList).find_index (tag, index); }
/* Lookup accessors: lookups are addressed by index only (no tags). */
inline unsigned int get_lookup_count (void) const
{ return (this+lookupList).len; }
inline const Lookup& get_lookup (unsigned int i) const
{ return (this+lookupList)[i]; }
/* Top-level sanitize for a GSUB/GPOS table: check the version field, accept
 * only major version 1, then recursively sanitize the three sub-lists
 * (each offset resolved against `this`).  && short-circuits, so later
 * lists are not touched once a check fails.
 * NOTE(review): the function's braces are not visible in this listing. */
inline bool sanitize (hb_sanitize_context_t *c) const
TRACE_SANITIZE (this);
return_trace (version.sanitize (c) &&
	      likely (version.major == 1) &&
	      scriptList.sanitize (c, this) &&
	      featureList.sanitize (c, this) &&
	      lookupList.sanitize (c, this));
/* Wire-format header shared by GSUB and GPOS.
 * DEFINE_SIZE_STATIC (10) = version (4) + three 16-bit offsets (2 each). */
FixedVersion<>version;	/* Version of the GSUB/GPOS table--initially set
			 * to 0x00010000u */
OffsetTo<ScriptList>
	scriptList;	/* ScriptList table */
OffsetTo<FeatureList>
	featureList;	/* FeatureList table */
OffsetTo<LookupList>
	lookupList;	/* LookupList table */
DEFINE_SIZE_STATIC (10);
2293 } /* namespace OT */
2296 #endif /* HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH */