2 * Copyright © 2007,2008,2009,2010 Red Hat, Inc.
3 * Copyright © 2010,2012 Google, Inc.
5 * This is part of HarfBuzz, a text shaping library.
7 * Permission is hereby granted, without written agreement and without
8 * license or royalty fees, to use, copy, modify, and distribute this
9 * software and its documentation for any purpose, provided that the
10 * above copyright notice and the following two paragraphs appear in
11 * all copies of this software.
13 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
14 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
15 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
16 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
19 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
20 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
21 * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
22 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
23 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
25 * Red Hat Author(s): Behdad Esfahbod
26 * Google Author(s): Behdad Esfahbod
29 #ifndef HB_OT_LAYOUT_GSUBGPOS_HH
30 #define HB_OT_LAYOUT_GSUBGPOS_HH
33 #include "hb-buffer.hh"
36 #include "hb-ot-map.hh"
37 #include "hb-ot-layout-common.hh"
38 #include "hb-ot-layout-gdef-table.hh"
/* hb_intersects_context_t: dispatch context that answers whether a lookup's
 * subtables can match anything in a given glyph set.  Iteration over
 * sublookups stops as soon as one intersects.
 * NOTE(review): this file is a line-sampled extract; structural lines
 * (braces, some members) are elided throughout.  Comments only added. */
44 struct hb_intersects_context_t :
45 hb_dispatch_context_t<hb_intersects_context_t, bool, 0>
47 const char *get_name () { return "INTERSECTS"; }
/* Per-subtable hook: true iff this subtable can match something in `glyphs`. */
49 return_t dispatch (const T &obj) { return obj.intersects (this->glyphs); }
50 static return_t default_return_value () { return false; }
/* One intersecting subtable suffices; stop early once `r` is true. */
51 bool stop_sublookup_iteration (return_t r) const { return r; }
/* Glyph set under consideration (not owned). */
53 const hb_set_t *glyphs;
54 unsigned int debug_depth;
/* Constructor; initializer list continues on elided lines. */
56 hb_intersects_context_t (const hb_set_t *glyphs_) :
/* hb_closure_context_t: computes the transitive closure of glyphs reachable
 * from a starting set through GSUB lookups.  dispatch() calls obj.closure();
 * recursion is bounded both by nesting depth and by a total lookup budget
 * (HB_MAX_LOOKUP_INDICES) to guard against pathological fonts. */
61 struct hb_closure_context_t :
62 hb_dispatch_context_t<hb_closure_context_t, hb_empty_t, 0>
64 const char *get_name () { return "CLOSURE"; }
65 typedef return_t (*recurse_func_t) (hb_closure_context_t *c, unsigned int lookup_index);
67 return_t dispatch (const T &obj) { obj.closure (this); return hb_empty_t (); }
68 static return_t default_return_value () { return hb_empty_t (); }
/* Recurse into a nested lookup; bails when the nesting budget is spent or
 * no recurse callback has been installed. */
69 void recurse (unsigned int lookup_index)
71 if (unlikely (nesting_level_left == 0 || !recurse_func))
75 recurse_func (this, lookup_index);
/* Caps the total number of lookups visited across the whole closure. */
79 bool lookup_limit_exceeded ()
80 { return lookup_count > HB_MAX_LOOKUP_INDICES; }
/* True if the lookup should be processed now; records the current glyph
 * population so the lookup is revisited only after the set has grown. */
82 bool should_visit_lookup (unsigned int lookup_index)
84 if (lookup_count++ > HB_MAX_LOOKUP_INDICES)
87 if (is_lookup_done (lookup_index))
90 done_lookups->set (lookup_index, glyphs->get_population ())
94 bool is_lookup_done (unsigned int lookup_index)
96 /* Have we visited this lookup with the current set of glyphs? */
97 return done_lookups->get (lookup_index) == glyphs->get_population ();
103 recurse_func_t recurse_func;
104 unsigned int nesting_level_left;
105 unsigned int debug_depth;
/* Constructor; several initializer-list entries are on elided lines. */
107 hb_closure_context_t (hb_face_t *face_,
109 hb_map_t *done_lookups_,
110 unsigned int nesting_level_left_ = HB_MAX_NESTING_LEVEL) :
113 recurse_func (nullptr),
114 nesting_level_left (nesting_level_left_),
116 done_lookups (done_lookups_),
/* Destructor flushes any pending output glyphs into the main set. */
120 ~hb_closure_context_t () { flush (); }
122 void set_recurse_func (recurse_func_t func) { recurse_func = func; }
/* flush(): drop out-of-range glyph ids, merge `output` into `glyphs`, clear. */
126 hb_set_del_range (output, face->get_num_glyphs (), hb_set_get_max (output)); /* Remove invalid glyphs. */
127 hb_set_union (glyphs, output);
128 hb_set_clear (output);
/* Maps lookup_index -> glyph population at the time it was last visited. */
132 hb_map_t *done_lookups;
133 unsigned int lookup_count;
/* hb_closure_lookups_context_t: computes which lookups are reachable from a
 * set of lookups (for subsetting), recording visited and inactive lookups. */
136 struct hb_closure_lookups_context_t :
137 hb_dispatch_context_t<hb_closure_lookups_context_t, hb_empty_t, 0>
139 const char *get_name () { return "CLOSURE_LOOKUPS"; }
140 typedef return_t (*recurse_func_t) (hb_closure_lookups_context_t *c, unsigned lookup_index);
141 template <typename T>
142 return_t dispatch (const T &obj) { obj.closure_lookups (this); return hb_empty_t (); }
143 static return_t default_return_value () { return hb_empty_t (); }
/* Depth-bounded recursion; each lookup is recursed into at most once. */
144 void recurse (unsigned lookup_index)
146 if (unlikely (nesting_level_left == 0 || !recurse_func))
149 /* Return if new lookup was recursed to before. */
150 if (is_lookup_visited (lookup_index))
153 set_lookup_visited (lookup_index);
154 nesting_level_left--;
155 recurse_func (this, lookup_index);
156 nesting_level_left++;
159 void set_lookup_visited (unsigned lookup_index)
160 { visited_lookups->add (lookup_index); }
/* Inactive lookups are reachable but can never apply for the glyph set. */
162 void set_lookup_inactive (unsigned lookup_index)
163 { inactive_lookups->add (lookup_index); }
165 bool lookup_limit_exceeded ()
166 { return lookup_count > HB_MAX_LOOKUP_INDICES; }
/* NOTE: once the lookup budget is blown this reports "visited" for
 * everything (see the elided early return after the increment check). */
168 bool is_lookup_visited (unsigned lookup_index)
170 if (lookup_count++ > HB_MAX_LOOKUP_INDICES)
173 return visited_lookups->has (lookup_index);
177 const hb_set_t *glyphs;
178 recurse_func_t recurse_func;
179 unsigned int nesting_level_left;
180 unsigned int debug_depth;
/* Constructor; some initializer-list entries are on elided lines. */
182 hb_closure_lookups_context_t (hb_face_t *face_,
183 const hb_set_t *glyphs_,
184 hb_set_t *visited_lookups_,
185 hb_set_t *inactive_lookups_,
186 unsigned nesting_level_left_ = HB_MAX_NESTING_LEVEL) :
189 recurse_func (nullptr),
190 nesting_level_left (nesting_level_left_),
192 visited_lookups (visited_lookups_),
193 inactive_lookups (inactive_lookups_),
196 void set_recurse_func (recurse_func_t func) { recurse_func = func; }
199 hb_set_t *visited_lookups;
200 hb_set_t *inactive_lookups;
201 unsigned int lookup_count;
/* hb_would_apply_context_t: answers "would this lookup apply to this exact
 * glyph sequence?" without mutating a buffer.  `glyphs` here is a plain
 * array of codepoints (the candidate sequence), not an hb_set_t. */
204 struct hb_would_apply_context_t :
205 hb_dispatch_context_t<hb_would_apply_context_t, bool, 0>
207 const char *get_name () { return "WOULD_APPLY"; }
208 template <typename T>
209 return_t dispatch (const T &obj) { return obj.would_apply (this); }
210 static return_t default_return_value () { return false; }
/* Any subtable that would apply is enough; stop on first true. */
211 bool stop_sublookup_iteration (return_t r) const { return r; }
214 const hb_codepoint_t *glyphs;
217 unsigned int debug_depth;
/* Constructor; remaining parameters/initializers are on elided lines.
 * zero_context: if set, no context (backtrack/lookahead) may be required. */
219 hb_would_apply_context_t (hb_face_t *face_,
220 const hb_codepoint_t *glyphs_,
222 bool zero_context_) :
226 zero_context (zero_context_),
/* hb_collect_glyphs_context_t: collects the glyphs a lookup can touch, split
 * into before/input/after/output sets.  Recursion into nested lookups only
 * gathers output glyphs (see the long note below). */
231 struct hb_collect_glyphs_context_t :
232 hb_dispatch_context_t<hb_collect_glyphs_context_t, hb_empty_t, 0>
234 const char *get_name () { return "COLLECT_GLYPHS"; }
235 typedef return_t (*recurse_func_t) (hb_collect_glyphs_context_t *c, unsigned int lookup_index);
236 template <typename T>
237 return_t dispatch (const T &obj) { obj.collect_glyphs (this); return hb_empty_t (); }
238 static return_t default_return_value () { return hb_empty_t (); }
239 void recurse (unsigned int lookup_index)
241 if (unlikely (nesting_level_left == 0 || !recurse_func))
244 /* Note that GPOS sets recurse_func to nullptr already, so it doesn't get
245 * past the previous check. For GSUB, we only want to collect the output
246 * glyphs in the recursion. If output is not requested, we can go home now.
248 * Note further, that the above is not exactly correct. A recursed lookup
249 * is allowed to match input that is not matched in the context, but that's
250 * not how most fonts are built. It's possible to relax that and recurse
251 * with all sets here if it proves to be an issue.
254 if (output == hb_set_get_empty ())
257 /* Return if new lookup was recursed to before. */
258 if (recursed_lookups->has (lookup_index))
/* Temporarily redirect before/input/after to the shared empty set so the
 * recursed lookup contributes only to `output`; restored afterwards
 * (restore lines are elided from this extract). */
261 hb_set_t *old_before = before;
262 hb_set_t *old_input = input;
263 hb_set_t *old_after = after;
264 before = input = after = hb_set_get_empty ();
266 nesting_level_left--;
267 recurse_func (this, lookup_index);
268 nesting_level_left++;
274 recursed_lookups->add (lookup_index);
282 recurse_func_t recurse_func;
/* Owned; created in the constructor, destroyed in the destructor. */
283 hb_set_t *recursed_lookups;
284 unsigned int nesting_level_left;
285 unsigned int debug_depth;
/* Null output pointers are mapped to the shared immutable empty set, so the
 * collection code never needs null checks. */
287 hb_collect_glyphs_context_t (hb_face_t *face_,
288 hb_set_t *glyphs_before, /* OUT. May be NULL */
289 hb_set_t *glyphs_input, /* OUT. May be NULL */
290 hb_set_t *glyphs_after, /* OUT. May be NULL */
291 hb_set_t *glyphs_output, /* OUT. May be NULL */
292 unsigned int nesting_level_left_ = HB_MAX_NESTING_LEVEL) :
294 before (glyphs_before ? glyphs_before : hb_set_get_empty ()),
295 input (glyphs_input ? glyphs_input : hb_set_get_empty ()),
296 after (glyphs_after ? glyphs_after : hb_set_get_empty ()),
297 output (glyphs_output ? glyphs_output : hb_set_get_empty ()),
298 recurse_func (nullptr),
299 recursed_lookups (hb_set_create ()),
300 nesting_level_left (nesting_level_left_),
302 ~hb_collect_glyphs_context_t () { hb_set_destroy (recursed_lookups); }
304 void set_recurse_func (recurse_func_t func) { recurse_func = func; }
/* hb_collect_coverage_context_t: accumulates the Coverage of every subtable
 * of a lookup into `set`.  Parameterized on the set type so it works with
 * both hb_set_t and digest-style sets. */
309 template <typename set_t>
310 struct hb_collect_coverage_context_t :
311 hb_dispatch_context_t<hb_collect_coverage_context_t<set_t>, const Coverage &, HB_DEBUG_GET_COVERAGE>
313 const char *get_name () { return "GET_COVERAGE"; }
314 typedef const Coverage &return_t;
315 template <typename T>
316 return_t dispatch (const T &obj) { return obj.get_coverage (); }
317 static return_t default_return_value () { return Null (Coverage); }
/* Folds each returned Coverage into `set` as iteration proceeds; the
 * actual return value is on an elided line. */
318 bool stop_sublookup_iteration (return_t r) const
320 r.collect_coverage (set);
324 hb_collect_coverage_context_t (set_t *set_) :
329 unsigned int debug_depth;
/* hb_ot_apply_context_t: the central context for actually APPLYING GSUB/GPOS
 * lookups to a buffer.  Contains the glyph matcher (matcher_t), the
 * ignorable-skipping iterator (skipping_iterator_t), and glyph-replacement
 * helpers that keep GDEF glyph-class props in sync.
 * NOTE(review): many structural lines (nested-struct headers, braces, enum
 * bodies, some members) are elided from this extract; code left untouched. */
333 struct hb_ot_apply_context_t :
334 hb_dispatch_context_t<hb_ot_apply_context_t, bool, HB_DEBUG_APPLY>
/* --- nested matcher_t (its `struct matcher_t` header line is elided) --- */
343 #define arg1(arg) (arg) /* Remove the macro to see why it's needed! */
346 match_func (nullptr),
347 match_data (nullptr) {}
349 typedef bool (*match_func_t) (hb_codepoint_t glyph_id, const HBUINT16 &value, const void *data);
351 void set_ignore_zwnj (bool ignore_zwnj_) { ignore_zwnj = ignore_zwnj_; }
352 void set_ignore_zwj (bool ignore_zwj_) { ignore_zwj = ignore_zwj_; }
353 void set_lookup_props (unsigned int lookup_props_) { lookup_props = lookup_props_; }
354 void set_mask (hb_mask_t mask_) { mask = mask_; }
355 void set_syllable (uint8_t syllable_) { syllable = syllable_; }
356 void set_match_func (match_func_t match_func_,
357 const void *match_data_)
358 { match_func = match_func_; match_data = match_data_; }
/* may_match: glyph must carry the lookup mask and (if syllable-restricted)
 * the same syllable; then the match_func (if any) decides YES/NO.
 * The MATCH_MAYBE return for the no-match-func case is on an elided line. */
366 may_match_t may_match (const hb_glyph_info_t &info,
367 const HBUINT16 *glyph_data) const
369 if (!(info.mask & mask) ||
370 (syllable && syllable != info.syllable ()))
374 return match_func (info.codepoint, *glyph_data, match_data) ? MATCH_YES : MATCH_NO;
/* may_skip: glyphs rejected by lookup_props are SKIP_YES; default
 * ignorables (minus unignored ZWNJ/ZWJ) are skippable; others SKIP_NO. */
385 may_skip_t may_skip (const hb_ot_apply_context_t *c,
386 const hb_glyph_info_t &info) const
388 if (!c->check_glyph_property (&info, lookup_props))
391 if (unlikely (_hb_glyph_info_is_default_ignorable_and_not_hidden (&info) &&
392 (ignore_zwnj || !_hb_glyph_info_is_zwnj (&info)) &&
393 (ignore_zwj || !_hb_glyph_info_is_zwj (&info))))
400 unsigned int lookup_props;
405 match_func_t match_func;
406 const void *match_data;
/* --- skipping_iterator_t: walks the buffer skipping ignorables --- */
409 struct skipping_iterator_t
410 void init (hb_ot_apply_context_t *c_, bool context_match = false)
414 match_glyph_data = nullptr;
415 matcher.set_match_func (nullptr, nullptr);
416 matcher.set_lookup_props (c->lookup_props);
417 /* Ignore ZWNJ if we are matching GPOS, or matching GSUB context and asked to. */
418 matcher.set_ignore_zwnj (c->table_index == 1 || (context_match && c->auto_zwnj));
419 /* Ignore ZWJ if we are matching context, or asked to. */
420 matcher.set_ignore_zwj (context_match || c->auto_zwj);
/* Context matching ignores the lookup mask (-1 = all bits). */
421 matcher.set_mask (context_match ? -1 : c->lookup_mask);
423 void set_lookup_props (unsigned int lookup_props)
425 matcher.set_lookup_props (lookup_props);
427 void set_match_func (matcher_t::match_func_t match_func_,
428 const void *match_data_,
429 const HBUINT16 glyph_data[])
431 matcher.set_match_func (match_func_, match_data_);
432 match_glyph_data = glyph_data;
/* reset: position the iterator; syllable restriction only applies when
 * starting at the buffer cursor. */
435 void reset (unsigned int start_index_,
436 unsigned int num_items_)
439 num_items = num_items_;
440 end = c->buffer->len;
441 matcher.set_syllable (start_index_ == c->buffer->idx ? c->buffer->cur().syllable () : 0);
/* reject(): un-consume the current candidate's match data. */
447 if (match_glyph_data) match_glyph_data--;
450 matcher_t::may_skip_t
451 may_skip (const hb_glyph_info_t &info) const
452 { return matcher.may_skip (c, info); }
/* next(): advance forward to the next matching, non-skipped glyph.
 * (Increment/return statements are on elided lines.) */
456 assert (num_items > 0);
457 while (idx + num_items < end)
460 const hb_glyph_info_t &info = c->buffer->info[idx];
462 matcher_t::may_skip_t skip = matcher.may_skip (c, info);
463 if (unlikely (skip == matcher_t::SKIP_YES))
466 matcher_t::may_match_t match = matcher.may_match (info, match_glyph_data);
467 if (match == matcher_t::MATCH_YES ||
468 (match == matcher_t::MATCH_MAYBE &&
469 skip == matcher_t::SKIP_NO))
472 if (match_glyph_data) match_glyph_data++;
476 if (skip == matcher_t::SKIP_NO)
/* prev(): same walk, backwards over out_info. */
483 assert (num_items > 0);
484 while (idx > num_items - 1)
487 const hb_glyph_info_t &info = c->buffer->out_info[idx];
489 matcher_t::may_skip_t skip = matcher.may_skip (c, info);
490 if (unlikely (skip == matcher_t::SKIP_YES))
493 matcher_t::may_match_t match = matcher.may_match (info, match_glyph_data);
494 if (match == matcher_t::MATCH_YES ||
495 (match == matcher_t::MATCH_MAYBE &&
496 skip == matcher_t::SKIP_NO))
499 if (match_glyph_data) match_glyph_data++;
503 if (skip == matcher_t::SKIP_NO)
511 hb_ot_apply_context_t *c;
513 const HBUINT16 *match_glyph_data;
515 unsigned int num_items;
/* --- dispatch interface of hb_ot_apply_context_t itself --- */
520 const char *get_name () { return "APPLY"; }
521 typedef return_t (*recurse_func_t) (hb_ot_apply_context_t *c, unsigned int lookup_index);
522 template <typename T>
523 return_t dispatch (const T &obj) { return obj.apply (this); }
524 static return_t default_return_value () { return false; }
525 bool stop_sublookup_iteration (return_t r) const { return r; }
/* recurse: apply a nested lookup; bounded by nesting depth AND by the
 * buffer's max_ops work budget (decremented here). */
526 return_t recurse (unsigned int sub_lookup_index)
528 if (unlikely (nesting_level_left == 0 || !recurse_func || buffer->max_ops-- <= 0))
529 return default_return_value ();
531 nesting_level_left--;
532 bool ret = recurse_func (this, sub_lookup_index);
533 nesting_level_left++;
537 skipping_iterator_t iter_input, iter_context;
542 recurse_func_t recurse_func;
544 const VariationStore &var_store;
546 hb_direction_t direction;
547 hb_mask_t lookup_mask;
548 unsigned int table_index; /* GSUB/GPOS */
549 unsigned int lookup_index;
550 unsigned int lookup_props;
551 unsigned int nesting_level_left;
552 unsigned int debug_depth;
554 bool has_glyph_classes;
559 uint32_t random_state;
/* Constructor; several parameters/initializers are on elided lines. */
562 hb_ot_apply_context_t (unsigned int table_index_,
564 hb_buffer_t *buffer_) :
565 iter_input (), iter_context (),
566 font (font_), face (font->face), buffer (buffer_),
567 recurse_func (nullptr),
569 #ifndef HB_NO_OT_LAYOUT
570 *face->table.GDEF->table
575 var_store (gdef.get_var_store ()),
576 direction (buffer_->props.direction),
578 table_index (table_index_),
579 lookup_index ((unsigned int) -1),
581 nesting_level_left (HB_MAX_NESTING_LEVEL),
583 has_glyph_classes (gdef.has_glyph_classes ()),
587 random_state (1) { init_iters (); }
/* init_iters(): (re)configure both iterators after any setting change. */
591 iter_input.init (this, false);
592 iter_context.init (this, true);
/* Setters that affect matching re-run init_iters(). */
595 void set_lookup_mask (hb_mask_t mask) { lookup_mask = mask; init_iters (); }
596 void set_auto_zwj (bool auto_zwj_) { auto_zwj = auto_zwj_; init_iters (); }
597 void set_auto_zwnj (bool auto_zwnj_) { auto_zwnj = auto_zwnj_; init_iters (); }
598 void set_random (bool random_) { random = random_; }
599 void set_recurse_func (recurse_func_t func) { recurse_func = func; }
600 void set_lookup_index (unsigned int lookup_index_) { lookup_index = lookup_index_; }
601 void set_lookup_props (unsigned int lookup_props_) { lookup_props = lookup_props_; init_iters (); }
/* Deterministic PRNG for the 'rand' feature (minstd_rand parameters). */
603 uint32_t random_number ()
605 /* http://www.cplusplus.com/reference/random/minstd_rand/ */
606 random_state = random_state * 48271 % 2147483647;
610 bool match_properties_mark (hb_codepoint_t glyph,
611 unsigned int glyph_props,
612 unsigned int match_props) const
614 /* If using mark filtering sets, the high short of
615 * match_props has the set index.
617 if (match_props & LookupFlag::UseMarkFilteringSet)
618 return gdef.mark_set_covers (match_props >> 16, glyph);
620 /* The second byte of match_props has the meaning
621 * "ignore marks of attachment type different than
622 * the attachment type specified."
624 if (match_props & LookupFlag::MarkAttachmentType)
625 return (match_props & LookupFlag::MarkAttachmentType) == (glyph_props & LookupFlag::MarkAttachmentType);
/* check_glyph_property: LookupFlag filtering (IgnoreBase/Ligatures/Marks,
 * mark-attachment type, mark-filtering set). */
630 bool check_glyph_property (const hb_glyph_info_t *info,
631 unsigned int match_props) const
633 hb_codepoint_t glyph = info->codepoint;
634 unsigned int glyph_props = _hb_glyph_info_get_glyph_props (info);
636 /* Not covered, if, for example, glyph class is ligature and
637 * match_props includes LookupFlags::IgnoreLigatures
639 if (glyph_props & match_props & LookupFlag::IgnoreFlags)
642 if (unlikely (glyph_props & HB_OT_LAYOUT_GLYPH_PROPS_MARK))
643 return match_properties_mark (glyph, glyph_props, match_props);
/* _set_glyph_class: update GDEF-class props on the current glyph after a
 * substitution; the guarding `if (ligature)` / `if (component)` lines are
 * elided from this extract. */
648 void _set_glyph_class (hb_codepoint_t glyph_index,
649 unsigned int class_guess = 0,
650 bool ligature = false,
651 bool component = false) const
653 unsigned int props = _hb_glyph_info_get_glyph_props (&buffer->cur());
655 props |= HB_OT_LAYOUT_GLYPH_PROPS_SUBSTITUTED;
658 props |= HB_OT_LAYOUT_GLYPH_PROPS_LIGATED;
659 /* In the only place that the MULTIPLIED bit is used, Uniscribe
660 * seems to only care about the "last" transformation between
661 * Ligature and Multiple substitutions. Ie. if you ligate, expand,
662 * and ligate again, it forgives the multiplication and acts as
663 * if only ligation happened. As such, clear MULTIPLIED bit.
665 props &= ~HB_OT_LAYOUT_GLYPH_PROPS_MULTIPLIED;
668 props |= HB_OT_LAYOUT_GLYPH_PROPS_MULTIPLIED;
/* Prefer real GDEF classes; fall back to the caller's guess. */
670 if (likely (has_glyph_classes))
671 props = (props & ~HB_OT_LAYOUT_GLYPH_PROPS_CLASS_MASK) | gdef.get_glyph_props (glyph_index);
672 else if (class_guess)
673 props = (props & ~HB_OT_LAYOUT_GLYPH_PROPS_CLASS_MASK) | class_guess;
675 _hb_glyph_info_set_glyph_props (&buffer->cur(), props);
/* Replacement helpers: keep glyph-class props consistent with the buffer op. */
678 void replace_glyph (hb_codepoint_t glyph_index) const
680 _set_glyph_class (glyph_index);
681 buffer->replace_glyph (glyph_index);
683 void replace_glyph_inplace (hb_codepoint_t glyph_index) const
685 _set_glyph_class (glyph_index);
686 buffer->cur().codepoint = glyph_index;
688 void replace_glyph_with_ligature (hb_codepoint_t glyph_index,
689 unsigned int class_guess) const
691 _set_glyph_class (glyph_index, class_guess, true);
692 buffer->replace_glyph (glyph_index);
694 void output_glyph_for_component (hb_codepoint_t glyph_index,
695 unsigned int class_guess) const
697 _set_glyph_class (glyph_index, class_guess, false, true);
698 buffer->output_glyph (glyph_index);
/* hb_get_subtables_context_t: flattens a lookup's subtables into an array of
 * type-erased (object pointer, apply function, coverage digest) triples, so
 * the hot apply path can cheaply pre-filter by digest before dispatching. */
703 struct hb_get_subtables_context_t :
704 hb_dispatch_context_t<hb_get_subtables_context_t, hb_empty_t, HB_DEBUG_APPLY>
/* Trampoline that restores the concrete subtable type. */
706 template <typename Type>
707 static inline bool apply_to (const void *obj, OT::hb_ot_apply_context_t *c)
709 const Type *typed_obj = (const Type *) obj;
710 return typed_obj->apply (c);
713 typedef bool (*hb_apply_func_t) (const void *obj, OT::hb_ot_apply_context_t *c);
715 struct hb_applicable_t
717 template <typename T>
718 void init (const T &obj_, hb_apply_func_t apply_func_)
721 apply_func = apply_func_;
/* Seed the digest from the subtable's coverage for fast rejection. */
723 obj_.get_coverage ().collect_coverage (&digest);
/* Digest check first: cheap may-have test before the virtual-ish call. */
726 bool apply (OT::hb_ot_apply_context_t *c) const
728 return digest.may_have (c->buffer->cur().codepoint) && apply_func (obj, c);
733 hb_apply_func_t apply_func;
734 hb_set_digest_t digest;
737 typedef hb_vector_t<hb_applicable_t> array_t;
739 /* Dispatch interface. */
740 const char *get_name () { return "GET_SUBTABLES"; }
741 template <typename T>
742 return_t dispatch (const T &obj)
744 hb_applicable_t *entry = array.push();
745 entry->init (obj, apply_to<T>);
746 return hb_empty_t ();
748 static return_t default_return_value () { return hb_empty_t (); }
750 hb_get_subtables_context_t (array_t &array_) :
755 unsigned int debug_depth;
/* Function-pointer vocabularies for the three (Chain)Context match modes
 * (glyph / class / coverage), bundled into small Funcs structs so context
 * lookup code can be written once and parameterized. */
761 typedef bool (*intersects_func_t) (const hb_set_t *glyphs, const HBUINT16 &value, const void *data);
762 typedef void (*collect_glyphs_func_t) (hb_set_t *glyphs, const HBUINT16 &value, const void *data);
763 typedef bool (*match_func_t) (hb_codepoint_t glyph_id, const HBUINT16 &value, const void *data);
765 struct ContextClosureFuncs
767 intersects_func_t intersects;
769 struct ContextCollectGlyphsFuncs
771 collect_glyphs_func_t collect;
773 struct ContextApplyFuncs
779 static inline bool intersects_glyph (const hb_set_t *glyphs, const HBUINT16 &value, const void *data HB_UNUSED)
781 return glyphs->has (value);
783 static inline bool intersects_class (const hb_set_t *glyphs, const HBUINT16 &value, const void *data)
785 const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
786 return class_def.intersects_class (glyphs, value);
788 static inline bool intersects_coverage (const hb_set_t *glyphs, const HBUINT16 &value, const void *data)
790 const OffsetTo<Coverage> &coverage = (const OffsetTo<Coverage>&)value;
791 return (data+coverage).intersects (glyphs);
/* array_is_subset_of: true iff every value in `values` intersects `glyphs`
 * under the given predicate.  NOTE(review): a parameter line is elided here
 * — presumably `unsigned int count`, which the loop below references. */
794 static inline bool array_is_subset_of (const hb_set_t *glyphs,
796 const HBUINT16 values[],
797 intersects_func_t intersects_func,
798 const void *intersects_data)
/* Short-circuit on the first non-intersecting value. */
800 for (const HBUINT16 &_ : + hb_iter (values, count))
801 if (!intersects_func (glyphs, _, intersects_data)) return false;
/* Per-value collectors used by COLLECT_GLYPHS: add everything a match value
 * can denote into `glyphs`.  NOTE(review): collect_glyph's body line is
 * elided — presumably it adds `value` to `glyphs`; confirm upstream. */
806 static inline void collect_glyph (hb_set_t *glyphs, const HBUINT16 &value, const void *data HB_UNUSED)
/* Class mode: add all glyphs belonging to class `value` of the ClassDef. */
810 static inline void collect_class (hb_set_t *glyphs, const HBUINT16 &value, const void *data)
812 const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
813 class_def.collect_class (glyphs, value);
/* Coverage mode: add all glyphs covered by the referenced Coverage table. */
815 static inline void collect_coverage (hb_set_t *glyphs, const HBUINT16 &value, const void *data)
817 const OffsetTo<Coverage> &coverage = (const OffsetTo<Coverage>&)value;
818 (data+coverage).collect_coverage (glyphs);
/* collect_array: run a per-value collector over an array of match values.
 * NOTE(review): two parameter lines are elided here — presumably
 * `hb_set_t *glyphs` and `unsigned int count`, both used in the body. */
820 static inline void collect_array (hb_collect_glyphs_context_t *c HB_UNUSED,
823 const HBUINT16 values[],
824 collect_glyphs_func_t collect_func,
825 const void *collect_data)
828 + hb_iter (values, count)
829 | hb_apply ([&] (const HBUINT16 &_) { collect_func (glyphs, _, collect_data); })
834 static inline bool match_glyph (hb_codepoint_t glyph_id, const HBUINT16 &value, const void *data HB_UNUSED)
836 return glyph_id == value;
838 static inline bool match_class (hb_codepoint_t glyph_id, const HBUINT16 &value, const void *data)
840 const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
841 return class_def.get_class (glyph_id) == value;
843 static inline bool match_coverage (hb_codepoint_t glyph_id, const HBUINT16 &value, const void *data)
845 const OffsetTo<Coverage> &coverage = (const OffsetTo<Coverage>&)value;
846 return (data+coverage).get_coverage (glyph_id) != NOT_COVERED;
/* would_match_input: non-destructive check that the candidate glyph sequence
 * in `c` matches the rule's input array exactly (no skipping machinery).
 * NOTE(review): the elided lines presumably include a length check against
 * c->len and the success return — confirm upstream. */
849 static inline bool would_match_input (hb_would_apply_context_t *c,
850 unsigned int count, /* Including the first glyph (not matched) */
851 const HBUINT16 input[], /* Array of input values--start with second glyph */
852 match_func_t match_func,
853 const void *match_data)
/* input[] is offset by one: input[i-1] corresponds to c->glyphs[i]. */
858 for (unsigned int i = 1; i < count; i++)
859 if (likely (!match_func (c->glyphs[i], input[i - 1], match_data)))
/* match_input: match `count` glyphs (first one implicit at buffer->idx)
 * against the rule's input array, honoring the skipping iterator, and
 * enforce the ligature-component constraints documented below.  On success,
 * fills match_positions[], *end_offset, and optionally the total component
 * count.  (Extract note: some lines, e.g. loop braces, are elided.) */
864 static inline bool match_input (hb_ot_apply_context_t *c,
865 unsigned int count, /* Including the first glyph (not matched) */
866 const HBUINT16 input[], /* Array of input values--start with second glyph */
867 match_func_t match_func,
868 const void *match_data,
869 unsigned int *end_offset,
870 unsigned int match_positions[HB_MAX_CONTEXT_LENGTH],
871 unsigned int *p_total_component_count = nullptr)
873 TRACE_APPLY (nullptr);
875 if (unlikely (count > HB_MAX_CONTEXT_LENGTH)) return_trace (false);
877 hb_buffer_t *buffer = c->buffer;
879 hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
880 skippy_iter.reset (buffer->idx, count - 1);
881 skippy_iter.set_match_func (match_func, match_data, input);
884 * This is perhaps the trickiest part of OpenType... Remarks:
886 * - If all components of the ligature were marks, we call this a mark ligature.
888 * - If there is no GDEF, and the ligature is NOT a mark ligature, we categorize
889 * it as a ligature glyph.
891 * - Ligatures cannot be formed across glyphs attached to different components
892 * of previous ligatures. Eg. the sequence is LAM,SHADDA,LAM,FATHA,HEH, and
893 * LAM,LAM,HEH form a ligature, leaving SHADDA,FATHA next to each other.
894 * However, it would be wrong to ligate that SHADDA,FATHA sequence.
895 * There are a couple of exceptions to this:
897 * o If a ligature tries ligating with marks that belong to it itself, go ahead,
898 * assuming that the font designer knows what they are doing (otherwise it can
899 * break Indic stuff when a matra wants to ligate with a conjunct,
901 * o If two marks want to ligate and they belong to different components of the
902 * same ligature glyph, and said ligature glyph is to be ignored according to
903 * mark-filtering rules, then allow.
904 * https://github.com/harfbuzz/harfbuzz/issues/545
907 unsigned int total_component_count = 0;
908 total_component_count += _hb_glyph_info_get_lig_num_comps (&buffer->cur());
910 unsigned int first_lig_id = _hb_glyph_info_get_lig_id (&buffer->cur());
911 unsigned int first_lig_comp = _hb_glyph_info_get_lig_comp (&buffer->cur());
/* Tri-state, lazily computed: may the base ligature be skipped? */
915 LIGBASE_MAY_NOT_SKIP,
917 } ligbase = LIGBASE_NOT_CHECKED;
919 match_positions[0] = buffer->idx;
920 for (unsigned int i = 1; i < count; i++)
922 if (!skippy_iter.next ()) return_trace (false);
924 match_positions[i] = skippy_iter.idx;
926 unsigned int this_lig_id = _hb_glyph_info_get_lig_id (&buffer->info[skippy_iter.idx]);
927 unsigned int this_lig_comp = _hb_glyph_info_get_lig_comp (&buffer->info[skippy_iter.idx]);
929 if (first_lig_id && first_lig_comp)
931 /* If first component was attached to a previous ligature component,
932 * all subsequent components should be attached to the same ligature
933 * component, otherwise we shouldn't ligate them... */
934 if (first_lig_id != this_lig_id || first_lig_comp != this_lig_comp)
936 /* ...unless, we are attached to a base ligature and that base
937 * ligature is ignorable. */
938 if (ligbase == LIGBASE_NOT_CHECKED)
/* Walk backwards through out_info to find the base (comp 0) of the
 * ligature the first glyph is attached to. */
941 const auto *out = buffer->out_info;
942 unsigned int j = buffer->out_len;
943 while (j && _hb_glyph_info_get_lig_id (&out[j - 1]) == first_lig_id)
945 if (_hb_glyph_info_get_lig_comp (&out[j - 1]) == 0)
954 if (found && skippy_iter.may_skip (out[j]) == hb_ot_apply_context_t::matcher_t::SKIP_YES)
955 ligbase = LIGBASE_MAY_SKIP;
957 ligbase = LIGBASE_MAY_NOT_SKIP;
960 if (ligbase == LIGBASE_MAY_NOT_SKIP)
961 return_trace (false);
966 /* If first component was NOT attached to a previous ligature component,
967 * all subsequent components should also NOT be attached to any ligature
968 * component, unless they are attached to the first component itself! */
969 if (this_lig_id && this_lig_comp && (this_lig_id != first_lig_id))
970 return_trace (false);
973 total_component_count += _hb_glyph_info_get_lig_num_comps (&buffer->info[skippy_iter.idx]);
976 *end_offset = skippy_iter.idx - buffer->idx + 1;
978 if (p_total_component_count)
979 *p_total_component_count = total_component_count;
/* ligate_input: replace a matched glyph sequence with `lig_glyph`, assigning
 * ligature ids/components so marks (current and following) keep attaching to
 * the right component in GPOS.  See the long remarks below for why.
 * (Extract note: some lines, e.g. skip/advance statements, are elided.) */
983 static inline bool ligate_input (hb_ot_apply_context_t *c,
984 unsigned int count, /* Including the first glyph */
985 const unsigned int match_positions[HB_MAX_CONTEXT_LENGTH], /* Including the first glyph */
986 unsigned int match_length,
987 hb_codepoint_t lig_glyph,
988 unsigned int total_component_count)
990 TRACE_APPLY (nullptr);
992 hb_buffer_t *buffer = c->buffer;
994 buffer->merge_clusters (buffer->idx, buffer->idx + match_length);
996 /* - If a base and one or more marks ligate, consider that as a base, NOT
997 * ligature, such that all following marks can still attach to it.
998 * https://github.com/harfbuzz/harfbuzz/issues/1109
1000 * - If all components of the ligature were marks, we call this a mark ligature.
1001 * If it *is* a mark ligature, we don't allocate a new ligature id, and leave
1002 * the ligature to keep its old ligature id. This will allow it to attach to
1003 * a base ligature in GPOS. Eg. if the sequence is: LAM,LAM,SHADDA,FATHA,HEH,
1004 * and LAM,LAM,HEH form a ligature, they will leave SHADDA and FATHA with a
1005 * ligature id and component value of 2. Then if SHADDA,FATHA form a ligature
1006 * later, we don't want them to lose their ligature id/component, otherwise
1007 * GPOS will fail to correctly position the mark ligature on top of the
1008 * LAM,LAM,HEH ligature. See:
1009 * https://bugzilla.gnome.org/show_bug.cgi?id=676343
1011 * - If a ligature is formed of components that some of which are also ligatures
1012 * themselves, and those ligature components had marks attached to *their*
1013 * components, we have to attach the marks to the new ligature component
1014 * positions! Now *that*'s tricky! And these marks may be following the
1015 * last component of the whole sequence, so we should loop forward looking
1016 * for them and update them.
1018 * Eg. the sequence is LAM,LAM,SHADDA,FATHA,HEH, and the font first forms a
1019 * 'calt' ligature of LAM,HEH, leaving the SHADDA and FATHA with a ligature
1020 * id and component == 1. Now, during 'liga', the LAM and the LAM-HEH ligature
1021 * form a LAM-LAM-HEH ligature. We need to reassign the SHADDA and FATHA to
1022 * the new ligature with a component value of 2.
1024 * This in fact happened to a font... See:
1025 * https://bugzilla.gnome.org/show_bug.cgi?id=437633
/* A "mark ligature" has only mark components; it keeps its old lig id. */
1028 bool is_base_ligature = _hb_glyph_info_is_base_glyph (&buffer->info[match_positions[0]]);
1029 bool is_mark_ligature = _hb_glyph_info_is_mark (&buffer->info[match_positions[0]]);
1030 for (unsigned int i = 1; i < count; i++)
1031 if (!_hb_glyph_info_is_mark (&buffer->info[match_positions[i]]))
1033 is_base_ligature = false;
1034 is_mark_ligature = false;
1037 bool is_ligature = !is_base_ligature && !is_mark_ligature;
1039 unsigned int klass = is_ligature ? HB_OT_LAYOUT_GLYPH_PROPS_LIGATURE : 0;
1040 unsigned int lig_id = is_ligature ? _hb_allocate_lig_id (buffer) : 0;
1041 unsigned int last_lig_id = _hb_glyph_info_get_lig_id (&buffer->cur());
1042 unsigned int last_num_components = _hb_glyph_info_get_lig_num_comps (&buffer->cur());
1043 unsigned int components_so_far = last_num_components;
1047 _hb_glyph_info_set_lig_props_for_ligature (&buffer->cur(), lig_id, total_component_count);
/* Spec quirk: a non-spacing-mark ligature becomes a letter. */
1048 if (_hb_glyph_info_get_general_category (&buffer->cur()) == HB_UNICODE_GENERAL_CATEGORY_NON_SPACING_MARK)
1050 _hb_glyph_info_set_general_category (&buffer->cur(), HB_UNICODE_GENERAL_CATEGORY_OTHER_LETTER);
1053 c->replace_glyph_with_ligature (lig_glyph, klass);
/* Walk skipped glyphs between components; remap their lig components onto
 * the new ligature. */
1055 for (unsigned int i = 1; i < count; i++)
1057 while (buffer->idx < match_positions[i] && buffer->successful)
1061 unsigned int this_comp = _hb_glyph_info_get_lig_comp (&buffer->cur());
1063 this_comp = last_num_components;
1064 unsigned int new_lig_comp = components_so_far - last_num_components +
1065 hb_min (this_comp, last_num_components);
1066 _hb_glyph_info_set_lig_props_for_mark (&buffer->cur(), lig_id, new_lig_comp);
1068 buffer->next_glyph ();
1071 last_lig_id = _hb_glyph_info_get_lig_id (&buffer->cur());
1072 last_num_components = _hb_glyph_info_get_lig_num_comps (&buffer->cur());
1073 components_so_far += last_num_components;
1075 /* Skip the base glyph */
1079 if (!is_mark_ligature && last_lig_id)
1081 /* Re-adjust components for any marks following. */
1082 for (unsigned i = buffer->idx; i < buffer->len; ++i)
1084 if (last_lig_id != _hb_glyph_info_get_lig_id (&buffer->info[i])) break;
1086 unsigned this_comp = _hb_glyph_info_get_lig_comp (&buffer->info[i]);
1087 if (!this_comp) break;
1089 unsigned new_lig_comp = components_so_far - last_num_components +
1090 hb_min (this_comp, last_num_components);
1091 _hb_glyph_info_set_lig_props_for_mark (&buffer->info[i], lig_id, new_lig_comp);
1094 return_trace (true);
/* match_backtrack: match `count` glyphs BEFORE the current position (walking
 * backwards over out_info via the context iterator); on success, records
 * the earliest matched position in *match_start.
 * NOTE(review): a parameter line (presumably `unsigned int count`) is
 * elided here — it is referenced in the body below. */
1097 static inline bool match_backtrack (hb_ot_apply_context_t *c,
1099 const HBUINT16 backtrack[],
1100 match_func_t match_func,
1101 const void *match_data,
1102 unsigned int *match_start)
1104 TRACE_APPLY (nullptr);
/* iter_context ignores the lookup mask and matches across syllables. */
1106 hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_context;
1107 skippy_iter.reset (c->buffer->backtrack_len (), count);
1108 skippy_iter.set_match_func (match_func, match_data, backtrack);
1110 for (unsigned int i = 0; i < count; i++)
1111 if (!skippy_iter.prev ())
1112 return_trace (false);
1114 *match_start = skippy_iter.idx;
1116 return_trace (true);
/* Matches glyphs *after* the input sequence against the `lookahead`
 * sequence, walking forward with the skipping iterator.  `offset` is the
 * distance past the current buffer position where lookahead begins
 * (typically the matched input length).  On success writes one past the
 * last matched index to *end_index.
 * NOTE(review): a `count` parameter line appears to be elided from this
 * excerpt — the body reads `count`; confirm against the full source. */
1119 static inline bool match_lookahead (hb_ot_apply_context_t *c,
1121 const HBUINT16 lookahead[],
1122 match_func_t match_func,
1123 const void *match_data,
1124 unsigned int offset,
1125 unsigned int *end_index)
1127 TRACE_APPLY (nullptr);
/* -1 because reset() positions the iterator before the first next(). */
1129 hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_context;
1130 skippy_iter.reset (c->buffer->idx + offset - 1, count);
1131 skippy_iter.set_match_func (match_func, match_data, lookahead);
1133 for (unsigned int i = 0; i < count; i++)
1134 if (!skippy_iter.next ())
1135 return_trace (false);
1137 *end_index = skippy_iter.idx + 1;
1139 return_trace (true);
/* Members of struct LookupRecord (struct header elided from this excerpt).
 * A LookupRecord pairs a position in the matched sequence with the lookup
 * to apply there. */
/* copy(): embeds this record into the serializer and remaps its
 * lookupListIndex through `lookup_map` (old->new lookup indices, used
 * during subsetting).
 * NOTE(review): the `return_trace (out);` line appears elided here. */
1146 LookupRecord* copy (hb_serialize_context_t *c,
1147 const hb_map_t *lookup_map) const
1149 TRACE_SERIALIZE (this);
1150 auto *out = c->embed (*this);
1151 if (unlikely (!out)) return_trace (nullptr);
1153 out->lookupListIndex = hb_map_get (lookup_map, lookupListIndex);
/* sanitize(): fixed-size struct, so a plain bounds check suffices. */
1157 bool sanitize (hb_sanitize_context_t *c) const
1159 TRACE_SANITIZE (this);
1160 return_trace (c->check_struct (this));
1163 HBUINT16 sequenceIndex; /* Index into current glyph
1164 * sequence--first glyph = 0 */
1165 HBUINT16 lookupListIndex; /* Lookup to apply to that
1166 * position--zero--based */
1168 DEFINE_SIZE_STATIC (4);
/* Recurses the given context `c` into every lookup referenced by the
 * record array; used by closure/collect-style contexts that need to
 * visit nested lookups rather than apply them. */
1171 template <typename context_t>
1172 static inline void recurse_lookups (context_t *c,
1173 unsigned int lookupCount,
1174 const LookupRecord lookupRecord[] /* Array of LookupRecords--in design order */)
1176 for (unsigned int i = 0; i < lookupCount; i++)
1177 c->recurse (lookupRecord[i].lookupListIndex);
/* Applies each LookupRecord, in order, at its matched position.
 * Positions in match_positions[] are first converted to out-buffer
 * indexing; after each recursed lookup, the code measures how much the
 * buffer length changed and patches the remaining match positions (and
 * the `end` of the matched range) accordingly.
 * NOTE(review): several lines (braces, `continue;`/`break;` statements,
 * the delta>0 vs delta<0 branch structure, and the `end` declaration)
 * are elided from this excerpt; comments below describe only what the
 * visible lines establish. */
1180 static inline bool apply_lookup (hb_ot_apply_context_t *c,
1181 unsigned int count, /* Including the first glyph */
1182 unsigned int match_positions[HB_MAX_CONTEXT_LENGTH], /* Including the first glyph */
1183 unsigned int lookupCount,
1184 const LookupRecord lookupRecord[], /* Array of LookupRecords--in design order */
1185 unsigned int match_length)
1187 TRACE_APPLY (nullptr);
1189 hb_buffer_t *buffer = c->buffer;
1192 /* All positions are distance from beginning of *output* buffer.
1195 unsigned int bl = buffer->backtrack_len ();
1196 end = bl + match_length;
/* delta shifts in-buffer positions to out-buffer positions. */
1198 int delta = bl - buffer->idx;
1199 /* Convert positions to new indexing. */
1200 for (unsigned int j = 0; j < count; j++)
1201 match_positions[j] += delta;
1204 for (unsigned int i = 0; i < lookupCount && buffer->successful; i++)
1206 unsigned int idx = lookupRecord[i].sequenceIndex;
1210 /* Don't recurse to ourself at same position.
1211 * Note that this test is too naive, it doesn't catch longer loops. */
1212 if (idx == 0 && lookupRecord[i].lookupListIndex == c->lookup_index)
1215 if (unlikely (!buffer->move_to (match_positions[idx])))
1218 if (unlikely (buffer->max_ops <= 0))
/* Detect buffer-length change caused by the recursed lookup by
 * comparing total (backtrack + lookahead) length before/after. */
1221 unsigned int orig_len = buffer->backtrack_len () + buffer->lookahead_len ();
1222 if (!c->recurse (lookupRecord[i].lookupListIndex))
1225 unsigned int new_len = buffer->backtrack_len () + buffer->lookahead_len ();
1226 int delta = new_len - orig_len;
1231 /* Recursed lookup changed buffer len. Adjust.
1235 * Right now, if buffer length increased by n, we assume n new glyphs
1236 * were added right after the current position, and if buffer length
1237 * was decreased by n, we assume n match positions after the current
1238 * one where removed. The former (buffer length increased) case is
1239 * fine, but the decrease case can be improved in at least two ways,
1240 * both of which are significant:
1242 * - If recursed-to lookup is MultipleSubst and buffer length
1243 * decreased, then it's current match position that was deleted,
1244 * NOT the one after it.
1246 * - If buffer length was decreased by n, it does not necessarily
1247 * mean that n match positions where removed, as there might
1248 * have been marks and default-ignorables in the sequence. We
1249 * should instead drop match positions between current-position
1250 * and current-position + n instead.
1252 * It should be possible to construct tests for both of these cases.
1256 if (end <= int (match_positions[idx]))
1258 /* End might end up being smaller than match_positions[idx] if the recursed
1259 * lookup ended up removing many items, more than we have had matched.
1260 * Just never rewind end back and get out of here.
1261 * https://bugs.chromium.org/p/chromium/issues/detail?id=659496 */
1262 end = match_positions[idx];
1263 /* There can't be any further changes. */
1267 unsigned int next = idx + 1; /* next now is the position after the recursed lookup. */
/* Growth case: cap the shift so match_positions[] stays in bounds. */
1271 if (unlikely (delta + count > HB_MAX_CONTEXT_LENGTH))
1276 /* NOTE: delta is negative. */
1277 delta = hb_max (delta, (int) next - (int) count);
1282 memmove (match_positions + next + delta, match_positions + next,
1283 (count - next) * sizeof (match_positions[0]));
1287 /* Fill in new entries. */
1288 for (unsigned int j = idx + 1; j < next; j++)
1289 match_positions[j] = match_positions[j - 1] + 1;
1291 /* And fixup the rest. */
1292 for (; next < count; next++)
1293 match_positions[next] += delta;
/* Park the buffer cursor just past the matched (and adjusted) range. */
1296 buffer->move_to (end);
1298 return_trace (true);
1303 /* Contextual lookups */
/* Each of these small structs bundles the per-operation callback table
 * with an opaque data pointer that is handed back to the callbacks;
 * they let one generic matcher serve glyph-, class-, and coverage-based
 * context formats.  (Closing braces elided in this excerpt.) */
1305 struct ContextClosureLookupContext
1307 ContextClosureFuncs funcs;
1308 const void *intersects_data;
1311 struct ContextCollectGlyphsLookupContext
1313 ContextCollectGlyphsFuncs funcs;
1314 const void *collect_data;
1317 struct ContextApplyLookupContext
1319 ContextApplyFuncs funcs;
1320 const void *match_data;
/* True iff every input value (after the first, implicit glyph) can be
 * matched by some glyph in `glyphs`, per the context's intersects
 * callback.  The `inputCount ? inputCount - 1 : 0` guards against a
 * malformed zero inputCount. */
1323 static inline bool context_intersects (const hb_set_t *glyphs,
1324 unsigned int inputCount, /* Including the first glyph (not matched) */
1325 const HBUINT16 input[], /* Array of input values--start with second glyph */
1326 ContextClosureLookupContext &lookup_context)
1328 return array_is_subset_of (glyphs,
1329 inputCount ? inputCount - 1 : 0, input,
1330 lookup_context.funcs.intersects, lookup_context.intersects_data)
/* Glyph-closure step: if the rule's input sequence can match within the
 * current glyph set, recurse into its lookups so their outputs join the
 * closure.  NOTE(review): the lines between the intersects test and the
 * recurse call (including the recurse_lookups invocation itself) are
 * partially elided from this excerpt. */
1333 static inline void context_closure_lookup (hb_closure_context_t *c,
1334 unsigned int inputCount, /* Including the first glyph (not matched) */
1335 const HBUINT16 input[], /* Array of input values--start with second glyph */
1336 unsigned int lookupCount,
1337 const LookupRecord lookupRecord[],
1338 ContextClosureLookupContext &lookup_context)
1340 if (context_intersects (c->glyphs,
1344 lookupCount, lookupRecord);
/* Collect-glyphs step: adds all possible input glyphs of this rule to
 * c->input, then recurses into the rule's lookups (recurse line shown
 * at 1358; its recurse_lookups prefix is elided in this excerpt). */
1347 static inline void context_collect_glyphs_lookup (hb_collect_glyphs_context_t *c,
1348 unsigned int inputCount, /* Including the first glyph (not matched) */
1349 const HBUINT16 input[], /* Array of input values--start with second glyph */
1350 unsigned int lookupCount,
1351 const LookupRecord lookupRecord[],
1352 ContextCollectGlyphsLookupContext &lookup_context)
1354 collect_array (c, c->input,
1355 inputCount ? inputCount - 1 : 0, input,
1356 lookup_context.funcs.collect, lookup_context.collect_data);
1358 lookupCount, lookupRecord);
/* would-apply step: checks only whether the input sequence would match
 * the query glyphs; the lookup records are irrelevant here (HB_UNUSED). */
1361 static inline bool context_would_apply_lookup (hb_would_apply_context_t *c,
1362 unsigned int inputCount, /* Including the first glyph (not matched) */
1363 const HBUINT16 input[], /* Array of input values--start with second glyph */
1364 unsigned int lookupCount HB_UNUSED,
1365 const LookupRecord lookupRecord[] HB_UNUSED,
1366 ContextApplyLookupContext &lookup_context)
1368 return would_match_input (c,
1370 lookup_context.funcs.match, lookup_context.match_data);
/* Apply step: match the input sequence at the current position, mark the
 * matched range unsafe-to-break, then hand off to apply_lookup to run
 * the per-position lookup records.
 * NOTE(review): the match_input argument lines and the apply_lookup call
 * prefix are partially elided from this excerpt. */
1372 static inline bool context_apply_lookup (hb_ot_apply_context_t *c,
1373 unsigned int inputCount, /* Including the first glyph (not matched) */
1374 const HBUINT16 input[], /* Array of input values--start with second glyph */
1375 unsigned int lookupCount,
1376 const LookupRecord lookupRecord[],
1377 ContextApplyLookupContext &lookup_context)
1379 unsigned int match_length = 0;
1380 unsigned int match_positions[HB_MAX_CONTEXT_LENGTH];
1381 return match_input (c,
1383 lookup_context.funcs.match, lookup_context.match_data,
1384 &match_length, match_positions)
1385 && (c->buffer->unsafe_to_break (c->buffer->idx, c->buffer->idx + match_length),
1387 inputCount, match_positions,
1388 lookupCount, lookupRecord,
/* Members of struct Rule (struct header elided from this excerpt).
 * A Rule stores inputCount, lookupCount, the unsized inputZ array, and a
 * trailing LookupRecord array located via StructAfter past inputZ.
 * Each member below simply locates that trailing array and forwards to
 * the corresponding context_*_lookup free function. */
1394 bool intersects (const hb_set_t *glyphs, ContextClosureLookupContext &lookup_context) const
1396 return context_intersects (glyphs,
1397 inputCount, inputZ.arrayZ,
1401 void closure (hb_closure_context_t *c, ContextClosureLookupContext &lookup_context) const
/* Guard against unbounded recursion in malicious fonts. */
1403 if (unlikely (c->lookup_limit_exceeded ())) return;
1405 const UnsizedArrayOf<LookupRecord> &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
1406 (inputZ.as_array ((inputCount ? inputCount - 1 : 0)));
1407 context_closure_lookup (c,
1408 inputCount, inputZ.arrayZ,
1409 lookupCount, lookupRecord.arrayZ,
1413 void closure_lookups (hb_closure_lookups_context_t *c) const
1415 if (unlikely (c->lookup_limit_exceeded ())) return;
1417 const UnsizedArrayOf<LookupRecord> &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
1418 (inputZ.as_array (inputCount ? inputCount - 1 : 0));
1419 recurse_lookups (c, lookupCount, lookupRecord.arrayZ);
1422 void collect_glyphs (hb_collect_glyphs_context_t *c,
1423 ContextCollectGlyphsLookupContext &lookup_context) const
1425 const UnsizedArrayOf<LookupRecord> &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
1426 (inputZ.as_array (inputCount ? inputCount - 1 : 0));
1427 context_collect_glyphs_lookup (c,
1428 inputCount, inputZ.arrayZ,
1429 lookupCount, lookupRecord.arrayZ,
1433 bool would_apply (hb_would_apply_context_t *c,
1434 ContextApplyLookupContext &lookup_context) const
1436 const UnsizedArrayOf<LookupRecord> &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
1437 (inputZ.as_array (inputCount ? inputCount - 1 : 0));
1438 return context_would_apply_lookup (c,
1439 inputCount, inputZ.arrayZ,
1440 lookupCount, lookupRecord.arrayZ,
1444 bool apply (hb_ot_apply_context_t *c,
1445 ContextApplyLookupContext &lookup_context) const
1448 const UnsizedArrayOf<LookupRecord> &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
1449 (inputZ.as_array (inputCount ? inputCount - 1 : 0));
1450 return_trace (context_apply_lookup (c, inputCount, inputZ.arrayZ, lookupCount, lookupRecord.arrayZ, lookup_context));
/* Rule serialization/subsetting members plus data layout (struct header
 * and closing brace elided from this excerpt). */
/* serialize(): copies the rule, remapping each input value (glyph id or
 * class) through input_mapping and each LookupRecord's lookup index
 * through lookup_map.  NOTE(review): lines assigning the mapped value
 * back into the serialized input array are partially elided. */
1453 bool serialize (hb_serialize_context_t *c,
1454 const hb_map_t *input_mapping, /* old->new glyphid or class mapping */
1455 const hb_map_t *lookup_map) const
1457 TRACE_SERIALIZE (this);
1458 auto *out = c->start_embed (this);
1459 if (unlikely (!c->extend_min (out))) return_trace (false);
1461 out->inputCount = inputCount;
1462 out->lookupCount = lookupCount;
1464 const hb_array_t<const HBUINT16> input = inputZ.as_array (inputCount - 1);
1465 for (const auto org : input)
1468 d = input_mapping->get (org);
1472 const UnsizedArrayOf<LookupRecord> &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
1473 (inputZ.as_array ((inputCount ? inputCount - 1 : 0)));
1474 for (unsigned i = 0; i < (unsigned) lookupCount; i++)
1475 c->copy (lookupRecord[i], lookup_map);
1477 return_trace (true);
/* subset(): a rule survives only if it has input values and every one
 * of them is present in the chosen mapping (glyph map for Format1,
 * class map for Format2). */
1480 bool subset (hb_subset_context_t *c,
1481 const hb_map_t *lookup_map,
1482 const hb_map_t *klass_map = nullptr) const
1484 TRACE_SUBSET (this);
1486 const hb_array_t<const HBUINT16> input = inputZ.as_array ((inputCount ? inputCount - 1 : 0));
1487 if (!input.length) return_trace (false);
1489 const hb_map_t *mapping = klass_map == nullptr ? c->plan->glyph_map : klass_map;
1490 if (!hb_all (input, mapping)) return_trace (false);
1491 return_trace (serialize (c->serializer, mapping, lookup_map));
/* sanitize(): checks the two counts, then range-checks the combined
 * variable-size tail (inputZ values + LookupRecords) in one go. */
1495 bool sanitize (hb_sanitize_context_t *c) const
1497 TRACE_SANITIZE (this);
1498 return_trace (inputCount.sanitize (c) &&
1499 lookupCount.sanitize (c) &&
1500 c->check_range (inputZ.arrayZ,
1501 inputZ.item_size * (inputCount ? inputCount - 1 : 0) +
1502 LookupRecord::static_size * lookupCount));
1506 HBUINT16 inputCount; /* Total number of glyphs in input
1507 * glyph sequence--includes the first
1509 HBUINT16 lookupCount; /* Number of LookupRecords */
1510 UnsizedArrayOf<HBUINT16>
1511 inputZ; /* Array of match inputs--start with
1513 /*UnsizedArrayOf<LookupRecord>
1514 lookupRecordX;*/ /* Array of LookupRecords--in
1517 DEFINE_SIZE_ARRAY (4, inputZ);
/* Members of struct RuleSet (struct header elided from this excerpt):
 * an offset array of Rules tried in preference order.  Query members
 * iterate the offsets lazily; `apply` stops at the first rule that
 * applies (the hb_any / iterator-prefix lines are elided here). */
1522 bool intersects (const hb_set_t *glyphs,
1523 ContextClosureLookupContext &lookup_context) const
1527 | hb_map (hb_add (this))
1528 | hb_map ([&] (const Rule &_) { return _.intersects (glyphs, lookup_context); })
1533 void closure (hb_closure_context_t *c,
1534 ContextClosureLookupContext &lookup_context) const
1536 if (unlikely (c->lookup_limit_exceeded ())) return;
1540 | hb_map (hb_add (this))
1541 | hb_apply ([&] (const Rule &_) { _.closure (c, lookup_context); })
1545 void closure_lookups (hb_closure_lookups_context_t *c) const
1547 if (unlikely (c->lookup_limit_exceeded ())) return;
1551 | hb_map (hb_add (this))
1552 | hb_apply ([&] (const Rule &_) { _.closure_lookups (c); })
1556 void collect_glyphs (hb_collect_glyphs_context_t *c,
1557 ContextCollectGlyphsLookupContext &lookup_context) const
1561 | hb_map (hb_add (this))
1562 | hb_apply ([&] (const Rule &_) { _.collect_glyphs (c, lookup_context); })
1566 bool would_apply (hb_would_apply_context_t *c,
1567 ContextApplyLookupContext &lookup_context) const
1571 | hb_map (hb_add (this))
1572 | hb_map ([&] (const Rule &_) { return _.would_apply (c, lookup_context); })
1577 bool apply (hb_ot_apply_context_t *c,
1578 ContextApplyLookupContext &lookup_context) const
1583 | hb_map (hb_add (this))
1584 | hb_map ([&] (const Rule &_) { return _.apply (c, lookup_context); })
/* subset(): serializes each rule that survives subsetting; a failed
 * rule is reverted individually (o_snap) and, if no rule survives at
 * all, the whole RuleSet is reverted (snap) and subsetting fails. */
1590 bool subset (hb_subset_context_t *c,
1591 const hb_map_t *lookup_map,
1592 const hb_map_t *klass_map = nullptr) const
1594 TRACE_SUBSET (this);
1596 auto snap = c->serializer->snapshot ();
1597 auto *out = c->serializer->start_embed (*this);
1598 if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
1600 for (const OffsetTo<Rule>& _ : rule)
1603 auto *o = out->rule.serialize_append (c->serializer);
1604 if (unlikely (!o)) continue;
1606 auto o_snap = c->serializer->snapshot ();
1607 if (!o->serialize_subset (c, _, this, lookup_map, klass_map))
1610 c->serializer->revert (o_snap);
1614 bool ret = bool (out->rule);
1615 if (!ret) c->serializer->revert (snap);
1620 bool sanitize (hb_sanitize_context_t *c) const
1622 TRACE_SANITIZE (this);
1623 return_trace (rule.sanitize (c, this));
1628 rule; /* Array of Rule tables
1629 * ordered by preference */
1631 DEFINE_SIZE_ARRAY (2, rule);
/* Contextual lookup, Format 1: rules keyed by first *glyph* — a
 * Coverage table maps the current glyph to a RuleSet index, and rule
 * inputs are glyph ids.  (Opening/closing braces and the lookup_context
 * initializer lines are elided in this excerpt.) */
1635 struct ContextFormat1
1637 bool intersects (const hb_set_t *glyphs) const
1639 struct ContextClosureLookupContext lookup_context = {
/* Only RuleSets whose first glyph is in `glyphs` can intersect. */
1645 + hb_zip (this+coverage, ruleSet)
1646 | hb_filter (*glyphs, hb_first)
1647 | hb_map (hb_second)
1648 | hb_map (hb_add (this))
1649 | hb_map ([&] (const RuleSet &_) { return _.intersects (glyphs, lookup_context); })
1654 void closure (hb_closure_context_t *c) const
1656 struct ContextClosureLookupContext lookup_context = {
1661 + hb_zip (this+coverage, ruleSet)
1662 | hb_filter (*c->glyphs, hb_first)
1663 | hb_map (hb_second)
1664 | hb_map (hb_add (this))
1665 | hb_apply ([&] (const RuleSet &_) { _.closure (c, lookup_context); })
1669 void closure_lookups (hb_closure_lookups_context_t *c) const
1672 | hb_map (hb_add (this))
1673 | hb_apply ([&] (const RuleSet &_) { _.closure_lookups (c); })
/* No device/variation data in contextual lookups: nothing to collect. */
1677 void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {}
1679 void collect_glyphs (hb_collect_glyphs_context_t *c) const
1681 (this+coverage).collect_coverage (c->input);
1683 struct ContextCollectGlyphsLookupContext lookup_context = {
1689 | hb_map (hb_add (this))
1690 | hb_apply ([&] (const RuleSet &_) { _.collect_glyphs (c, lookup_context); })
1694 bool would_apply (hb_would_apply_context_t *c) const
1696 const RuleSet &rule_set = this+ruleSet[(this+coverage).get_coverage (c->glyphs[0])];
1697 struct ContextApplyLookupContext lookup_context = {
1701 return rule_set.would_apply (c, lookup_context);
1704 const Coverage &get_coverage () const { return this+coverage; }
1706 bool apply (hb_ot_apply_context_t *c) const
1709 unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
1710 if (likely (index == NOT_COVERED))
1711 return_trace (false);
1713 const RuleSet &rule_set = this+ruleSet[index];
1714 struct ContextApplyLookupContext lookup_context = {
1718 return_trace (rule_set.apply (c, lookup_context));
/* subset(): keeps only covered glyphs retained by the plan whose
 * RuleSet also subsets successfully, then rebuilds Coverage from the
 * surviving (remapped) glyphs. */
1721 bool subset (hb_subset_context_t *c) const
1723 TRACE_SUBSET (this);
1724 const hb_set_t &glyphset = *c->plan->glyphset ();
1725 const hb_map_t &glyph_map = *c->plan->glyph_map;
1727 auto *out = c->serializer->start_embed (*this);
1728 if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
1729 out->format = format;
1731 const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? c->plan->gsub_lookups : c->plan->gpos_lookups;
1732 hb_sorted_vector_t<hb_codepoint_t> new_coverage;
1733 + hb_zip (this+coverage, ruleSet)
1734 | hb_filter (glyphset, hb_first)
1735 | hb_filter (subset_offset_array (c, out->ruleSet, this, lookup_map), hb_second)
1737 | hb_map (glyph_map)
1738 | hb_sink (new_coverage)
1741 out->coverage.serialize (c->serializer, out)
1742 .serialize (c->serializer, new_coverage.iter ());
1743 return_trace (bool (new_coverage));
1746 bool sanitize (hb_sanitize_context_t *c) const
1748 TRACE_SANITIZE (this);
1749 return_trace (coverage.sanitize (c, this) && ruleSet.sanitize (c, this));
1753 HBUINT16 format; /* Format identifier--format = 1 */
1755 coverage; /* Offset to Coverage table--from
1756 * beginning of table */
1757 OffsetArrayOf<RuleSet>
1758 ruleSet; /* Array of RuleSet tables
1759 * ordered by Coverage Index */
1761 DEFINE_SIZE_ARRAY (6, ruleSet);
/* Contextual lookup, Format 2: rules keyed by glyph *class* — Coverage
 * gates the first glyph, a ClassDef maps it to a class, and ruleSet is
 * indexed by class.  Rule inputs are class values, not glyph ids.
 * (Braces and lookup_context initializer lines elided in excerpt.) */
1765 struct ContextFormat2
1767 bool intersects (const hb_set_t *glyphs) const
1769 if (!(this+coverage).intersects (glyphs))
1772 const ClassDef &class_def = this+classDef;
1774 struct ContextClosureLookupContext lookup_context = {
/* A RuleSet for class i is reachable only if some glyph in `glyphs`
 * has class i. */
1780 + hb_enumerate (ruleSet)
1781 | hb_map ([&] (const hb_pair_t<unsigned, const OffsetTo<RuleSet> &> p)
1782 { return class_def.intersects_class (glyphs, p.first) &&
1783 (this+p.second).intersects (glyphs, lookup_context); })
1788 void closure (hb_closure_context_t *c) const
1790 if (!(this+coverage).intersects (c->glyphs))
1793 const ClassDef &class_def = this+classDef;
1795 struct ContextClosureLookupContext lookup_context = {
1801 + hb_enumerate (ruleSet)
1802 | hb_filter ([&] (unsigned _)
1803 { return class_def.intersects_class (c->glyphs, _); },
1805 | hb_map (hb_second)
1806 | hb_map (hb_add (this))
1807 | hb_apply ([&] (const RuleSet &_) { _.closure (c, lookup_context); })
1811 void closure_lookups (hb_closure_lookups_context_t *c) const
1814 | hb_map (hb_add (this))
1815 | hb_apply ([&] (const RuleSet &_) { _.closure_lookups (c); })
1819 void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {}
1821 void collect_glyphs (hb_collect_glyphs_context_t *c) const
1823 (this+coverage).collect_coverage (c->input);
1825 const ClassDef &class_def = this+classDef;
1826 struct ContextCollectGlyphsLookupContext lookup_context = {
1832 | hb_map (hb_add (this))
1833 | hb_apply ([&] (const RuleSet &_) { _.collect_glyphs (c, lookup_context); })
1837 bool would_apply (hb_would_apply_context_t *c) const
1839 const ClassDef &class_def = this+classDef;
1840 unsigned int index = class_def.get_class (c->glyphs[0]);
1841 const RuleSet &rule_set = this+ruleSet[index];
1842 struct ContextApplyLookupContext lookup_context = {
1846 return rule_set.would_apply (c, lookup_context);
1849 const Coverage &get_coverage () const { return this+coverage; }
1851 bool apply (hb_ot_apply_context_t *c) const
/* Coverage check first, then re-index by class of the same glyph. */
1854 unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
1855 if (likely (index == NOT_COVERED)) return_trace (false);
1857 const ClassDef &class_def = this+classDef;
1858 index = class_def.get_class (c->buffer->cur().codepoint);
1859 const RuleSet &rule_set = this+ruleSet[index];
1860 struct ContextApplyLookupContext lookup_context = {
1864 return_trace (rule_set.apply (c, lookup_context));
/* subset(): remaps classes via klass_map, keeps ruleSets for surviving
 * classes, and prunes empty trailing ruleSets so the array stays as
 * short as the last non-empty entry. */
1867 bool subset (hb_subset_context_t *c) const
1869 TRACE_SUBSET (this);
1870 auto *out = c->serializer->start_embed (*this);
1871 if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
1872 out->format = format;
1873 if (unlikely (!out->coverage.serialize_subset (c, coverage, this)))
1874 return_trace (false);
1877 out->classDef.serialize_subset (c, classDef, this, &klass_map);
1879 const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? c->plan->gsub_lookups : c->plan->gpos_lookups;
1881 unsigned non_zero_index = 0, index = 0;
1882 for (const hb_pair_t<unsigned, const OffsetTo<RuleSet>&> _ : + hb_enumerate (ruleSet)
1883 | hb_filter (klass_map, hb_first))
1885 auto *o = out->ruleSet.serialize_append (c->serializer);
1892 if (o->serialize_subset (c, _.second, this, lookup_map, &klass_map))
1893 non_zero_index = index;
1898 if (!ret) return_trace (ret);
1900 //prune empty trailing ruleSets
1902 while (index > non_zero_index)
1904 out->ruleSet.pop ();
1908 return_trace (bool (out->ruleSet));
1911 bool sanitize (hb_sanitize_context_t *c) const
1913 TRACE_SANITIZE (this);
1914 return_trace (coverage.sanitize (c, this) && classDef.sanitize (c, this) && ruleSet.sanitize (c, this));
1918 HBUINT16 format; /* Format identifier--format = 2 */
1920 coverage; /* Offset to Coverage table--from
1921 * beginning of table */
1923 classDef; /* Offset to glyph ClassDef table--from
1924 * beginning of table */
1925 OffsetArrayOf<RuleSet>
1926 ruleSet; /* Array of RuleSet tables
1927 * ordered by class */
1929 DEFINE_SIZE_ARRAY (8, ruleSet);
/* Contextual lookup, Format 3: one rule only, each input position gated
 * by its own Coverage table (coverageZ, glyphCount entries); the
 * LookupRecord array follows via StructAfter.  Note the recurring cast
 * of coverageZ.arrayZ+1 to HBUINT16*: coverage *offsets* double as the
 * generic 16-bit "input values" fed to the shared context_* helpers. */
1933 struct ContextFormat3
1935 bool intersects (const hb_set_t *glyphs) const
1937 if (!(this+coverageZ[0]).intersects (glyphs))
1940 struct ContextClosureLookupContext lookup_context = {
1941 {intersects_coverage},
1944 return context_intersects (glyphs,
1945 glyphCount, (const HBUINT16 *) (coverageZ.arrayZ + 1),
1949 void closure (hb_closure_context_t *c) const
1951 if (!(this+coverageZ[0]).intersects (c->glyphs))
1954 const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
1955 struct ContextClosureLookupContext lookup_context = {
1956 {intersects_coverage},
1959 context_closure_lookup (c,
1960 glyphCount, (const HBUINT16 *) (coverageZ.arrayZ + 1),
1961 lookupCount, lookupRecord,
1965 void closure_lookups (hb_closure_lookups_context_t *c) const
1967 const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
1968 recurse_lookups (c, lookupCount, lookupRecord);
1971 void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {}
1973 void collect_glyphs (hb_collect_glyphs_context_t *c) const
1975 (this+coverageZ[0]).collect_coverage (c->input);
1977 const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
1978 struct ContextCollectGlyphsLookupContext lookup_context = {
1983 context_collect_glyphs_lookup (c,
1984 glyphCount, (const HBUINT16 *) (coverageZ.arrayZ + 1),
1985 lookupCount, lookupRecord,
1989 bool would_apply (hb_would_apply_context_t *c) const
1991 const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
1992 struct ContextApplyLookupContext lookup_context = {
1996 return context_would_apply_lookup (c,
1997 glyphCount, (const HBUINT16 *) (coverageZ.arrayZ + 1),
1998 lookupCount, lookupRecord,
2002 const Coverage &get_coverage () const { return this+coverageZ[0]; }
2004 bool apply (hb_ot_apply_context_t *c) const
2007 unsigned int index = (this+coverageZ[0]).get_coverage (c->buffer->cur().codepoint);
2008 if (likely (index == NOT_COVERED)) return_trace (false);
2010 const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
2011 struct ContextApplyLookupContext lookup_context = {
2015 return_trace (context_apply_lookup (c, glyphCount, (const HBUINT16 *) (coverageZ.arrayZ + 1), lookupCount, lookupRecord, lookup_context));
/* subset(): every per-position coverage must survive subsetting, then
 * lookup records are copied with remapped lookup indices. */
2018 bool subset (hb_subset_context_t *c) const
2020 TRACE_SUBSET (this);
2021 auto *out = c->serializer->start_embed (this);
2022 if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
2024 out->format = format;
2025 out->glyphCount = glyphCount;
2026 out->lookupCount = lookupCount;
2028 const hb_array_t<const OffsetTo<Coverage>> coverages = coverageZ.as_array (glyphCount);
2030 for (const OffsetTo<Coverage>& offset : coverages)
2032 auto *o = c->serializer->allocate_size<OffsetTo<Coverage>> (OffsetTo<Coverage>::static_size);
2033 if (unlikely (!o)) return_trace (false);
2034 if (!o->serialize_subset (c, offset, this)) return_trace (false);
2037 const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
2038 const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? c->plan->gsub_lookups : c->plan->gpos_lookups;
2039 for (unsigned i = 0; i < (unsigned) lookupCount; i++)
2040 c->serializer->copy (lookupRecord[i], lookup_map);
2042 return_trace (true);
2045 bool sanitize (hb_sanitize_context_t *c) const
2047 TRACE_SANITIZE (this);
2048 if (!c->check_struct (this)) return_trace (false);
2049 unsigned int count = glyphCount;
2050 if (!count) return_trace (false); /* We want to access coverageZ[0] freely. */
2051 if (!c->check_array (coverageZ.arrayZ, count)) return_trace (false);
2052 for (unsigned int i = 0; i < count; i++)
2053 if (!coverageZ[i].sanitize (c, this)) return_trace (false);
2054 const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
2055 return_trace (c->check_array (lookupRecord, lookupCount));
2059 HBUINT16 format; /* Format identifier--format = 3 */
2060 HBUINT16 glyphCount; /* Number of glyphs in the input glyph
2062 HBUINT16 lookupCount; /* Number of LookupRecords */
2063 UnsizedArrayOf<OffsetTo<Coverage>>
2064 coverageZ; /* Array of offsets to Coverage
2065 * table in glyph sequence order */
2066 /*UnsizedArrayOf<LookupRecord>
2067 lookupRecordX;*/ /* Array of LookupRecords--in
2070 DEFINE_SIZE_ARRAY (6, coverageZ);
/* Members of struct Context (struct header, switch line, and union
 * declaration lines elided from this excerpt): the format-dispatching
 * wrapper.  Reads u.format safely via may_dispatch, then forwards the
 * operation to the matching format struct in the union. */
2075 template <typename context_t, typename ...Ts>
2076 typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
2078 TRACE_DISPATCH (this, u.format);
2079 if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
2081 case 1: return_trace (c->dispatch (u.format1, hb_forward<Ts> (ds)...));
2082 case 2: return_trace (c->dispatch (u.format2, hb_forward<Ts> (ds)...));
2083 case 3: return_trace (c->dispatch (u.format3, hb_forward<Ts> (ds)...));
2084 default:return_trace (c->default_return_value ());
/* Anonymous union over the three serialized layouts, discriminated by
 * the shared leading `format` field. */
2090 HBUINT16 format; /* Format identifier */
2091 ContextFormat1 format1;
2092 ContextFormat2 format2;
2093 ContextFormat3 format3;
2098 /* Chaining Contextual lookups */
/* Chaining counterparts of the Context*LookupContext structs: the
 * opaque data pointer becomes an array of three, one each for the
 * backtrack [0], input [1], and lookahead [2] sequences.
 * (Closing braces elided in this excerpt.) */
2100 struct ChainContextClosureLookupContext
2102 ContextClosureFuncs funcs;
2103 const void *intersects_data[3];
2106 struct ChainContextCollectGlyphsLookupContext
2108 ContextCollectGlyphsFuncs funcs;
2109 const void *collect_data[3];
2112 struct ChainContextApplyLookupContext
2114 ContextApplyFuncs funcs;
2115 const void *match_data[3];
/* True iff all three sequences (backtrack, input-after-first,
 * lookahead) can be matched from within `glyphs`, each checked with its
 * own data slot. */
2118 static inline bool chain_context_intersects (const hb_set_t *glyphs,
2119 unsigned int backtrackCount,
2120 const HBUINT16 backtrack[],
2121 unsigned int inputCount, /* Including the first glyph (not matched) */
2122 const HBUINT16 input[], /* Array of input values--start with second glyph */
2123 unsigned int lookaheadCount,
2124 const HBUINT16 lookahead[],
2125 ChainContextClosureLookupContext &lookup_context)
2127 return array_is_subset_of (glyphs,
2128 backtrackCount, backtrack,
2129 lookup_context.funcs.intersects, lookup_context.intersects_data[0])
2130 && array_is_subset_of (glyphs,
2131 inputCount ? inputCount - 1 : 0, input,
2132 lookup_context.funcs.intersects, lookup_context.intersects_data[1])
2133 && array_is_subset_of (glyphs,
2134 lookaheadCount, lookahead,
2135 lookup_context.funcs.intersects, lookup_context.intersects_data[2]);
/* Chaining glyph-closure step: if all three sequences can match within
 * the closure glyph set, recurse into the rule's lookups.
 * NOTE(review): the input-sequence argument line and the recurse_lookups
 * call prefix are elided from this excerpt. */
2138 static inline void chain_context_closure_lookup (hb_closure_context_t *c,
2139 unsigned int backtrackCount,
2140 const HBUINT16 backtrack[],
2141 unsigned int inputCount, /* Including the first glyph (not matched) */
2142 const HBUINT16 input[], /* Array of input values--start with second glyph */
2143 unsigned int lookaheadCount,
2144 const HBUINT16 lookahead[],
2145 unsigned int lookupCount,
2146 const LookupRecord lookupRecord[],
2147 ChainContextClosureLookupContext &lookup_context)
2149 if (chain_context_intersects (c->glyphs,
2150 backtrackCount, backtrack,
2152 lookaheadCount, lookahead,
2155 lookupCount, lookupRecord);
/* Chaining collect-glyphs step: routes each sequence to its own glyph
 * set (before / input / after), then recurses into the rule's lookups
 * (recurse_lookups prefix elided; its argument line shown at 2179). */
2158 static inline void chain_context_collect_glyphs_lookup (hb_collect_glyphs_context_t *c,
2159 unsigned int backtrackCount,
2160 const HBUINT16 backtrack[],
2161 unsigned int inputCount, /* Including the first glyph (not matched) */
2162 const HBUINT16 input[], /* Array of input values--start with second glyph */
2163 unsigned int lookaheadCount,
2164 const HBUINT16 lookahead[],
2165 unsigned int lookupCount,
2166 const LookupRecord lookupRecord[],
2167 ChainContextCollectGlyphsLookupContext &lookup_context)
2169 collect_array (c, c->before,
2170 backtrackCount, backtrack,
2171 lookup_context.funcs.collect, lookup_context.collect_data[0]);
2172 collect_array (c, c->input,
2173 inputCount ? inputCount - 1 : 0, input,
2174 lookup_context.funcs.collect, lookup_context.collect_data[1]);
2175 collect_array (c, c->after,
2176 lookaheadCount, lookahead,
2177 lookup_context.funcs.collect, lookup_context.collect_data[2]);
2179 lookupCount, lookupRecord);
/* Chaining would-apply step: in zero-context mode a rule with any
 * backtrack/lookahead cannot be claimed to apply, since only the input
 * glyphs are available to test; otherwise only the input sequence is
 * matched. */
2182 static inline bool chain_context_would_apply_lookup (hb_would_apply_context_t *c,
2183 unsigned int backtrackCount,
2184 const HBUINT16 backtrack[] HB_UNUSED,
2185 unsigned int inputCount, /* Including the first glyph (not matched) */
2186 const HBUINT16 input[], /* Array of input values--start with second glyph */
2187 unsigned int lookaheadCount,
2188 const HBUINT16 lookahead[] HB_UNUSED,
2189 unsigned int lookupCount HB_UNUSED,
2190 const LookupRecord lookupRecord[] HB_UNUSED,
2191 ChainContextApplyLookupContext &lookup_context)
2193 return (c->zero_context ? !backtrackCount && !lookaheadCount : true)
2194 && would_match_input (c,
2196 lookup_context.funcs.match, lookup_context.match_data[1]);
/* Chaining apply step: matches input first (cheapest rejection), then
 * backtrack, then lookahead (offset by the matched input length); on
 * full match marks the whole span unsafe-to-break across the out-buffer
 * boundary and runs apply_lookup on the records.
 * NOTE(review): the match_input argument line, the match_backtrack
 * *match_start out-arg line, and the apply_lookup call prefix are
 * elided from this excerpt. */
2199 static inline bool chain_context_apply_lookup (hb_ot_apply_context_t *c,
2200 unsigned int backtrackCount,
2201 const HBUINT16 backtrack[],
2202 unsigned int inputCount, /* Including the first glyph (not matched) */
2203 const HBUINT16 input[], /* Array of input values--start with second glyph */
2204 unsigned int lookaheadCount,
2205 const HBUINT16 lookahead[],
2206 unsigned int lookupCount,
2207 const LookupRecord lookupRecord[],
2208 ChainContextApplyLookupContext &lookup_context)
2210 unsigned int start_index = 0, match_length = 0, end_index = 0;
2211 unsigned int match_positions[HB_MAX_CONTEXT_LENGTH];
2212 return match_input (c,
2214 lookup_context.funcs.match, lookup_context.match_data[1],
2215 &match_length, match_positions)
2216 && match_backtrack (c,
2217 backtrackCount, backtrack,
2218 lookup_context.funcs.match, lookup_context.match_data[0],
2220 && match_lookahead (c,
2221 lookaheadCount, lookahead,
2222 lookup_context.funcs.match, lookup_context.match_data[2],
2223 match_length, &end_index)
2224 && (c->buffer->unsafe_to_break_from_outbuffer (start_index, end_index),
2226 inputCount, match_positions,
2227 lookupCount, lookupRecord,
2233 bool intersects (const hb_set_t *glyphs, ChainContextClosureLookupContext &lookup_context) const
2235 const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
2236 const ArrayOf<HBUINT16> &lookahead = StructAfter<ArrayOf<HBUINT16>> (input);
2237 return chain_context_intersects (glyphs,
2238 backtrack.len, backtrack.arrayZ,
2239 input.lenP1, input.arrayZ,
2240 lookahead.len, lookahead.arrayZ,
/* Glyph-closure pass: feed this rule's sequence arrays and nested
 * lookups to chain_context_closure_lookup.  Bails out early when the
 * recursion budget is exhausted.  (Listing elides some interior lines.) */
2244 void closure (hb_closure_context_t *c,
2245 ChainContextClosureLookupContext &lookup_context) const
2247 if (unlikely (c->lookup_limit_exceeded ())) return;
2249 const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
2250 const ArrayOf<HBUINT16> &lookahead = StructAfter<ArrayOf<HBUINT16>> (input);
2251 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord>> (lookahead);
2252 chain_context_closure_lookup (c,
2253 backtrack.len, backtrack.arrayZ,
2254 input.lenP1, input.arrayZ,
2255 lookahead.len, lookahead.arrayZ,
2256 lookup.len, lookup.arrayZ,
/* Record the lookup indices this rule recurses into (used to compute
 * the reachable-lookup set for subsetting). */
2260 void closure_lookups (hb_closure_lookups_context_t *c) const
2262 if (unlikely (c->lookup_limit_exceeded ())) return;
/* input/lookahead are only located to reach the trailing LookupRecord array. */
2264 const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
2265 const ArrayOf<HBUINT16> &lookahead = StructAfter<ArrayOf<HBUINT16>> (input);
2266 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord>> (lookahead);
2267 recurse_lookups (c, lookup.len, lookup.arrayZ);
/* Collect all glyphs this rule can reference (backtrack/input/lookahead)
 * plus glyphs reachable through the nested lookups.
 * (Listing elides some interior lines.) */
2270 void collect_glyphs (hb_collect_glyphs_context_t *c,
2271 ChainContextCollectGlyphsLookupContext &lookup_context) const
2273 const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
2274 const ArrayOf<HBUINT16> &lookahead = StructAfter<ArrayOf<HBUINT16>> (input);
2275 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord>> (lookahead);
2276 chain_context_collect_glyphs_lookup (c,
2277 backtrack.len, backtrack.arrayZ,
2278 input.lenP1, input.arrayZ,
2279 lookahead.len, lookahead.arrayZ,
2280 lookup.len, lookup.arrayZ,
/* Dry-run test: would this rule apply to the glyph sequence in `c`
 * without mutating any buffer?  Forwards to the shared helper. */
2284 bool would_apply (hb_would_apply_context_t *c,
2285 ChainContextApplyLookupContext &lookup_context) const
2287 const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
2288 const ArrayOf<HBUINT16> &lookahead = StructAfter<ArrayOf<HBUINT16>> (input);
2289 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord>> (lookahead);
2290 return chain_context_would_apply_lookup (c,
2291 backtrack.len, backtrack.arrayZ,
2292 input.lenP1, input.arrayZ,
2293 lookahead.len, lookahead.arrayZ, lookup.len,
2294 lookup.arrayZ, lookup_context);
/* Apply this rule to the buffer in `c`; returns whether it matched and
 * its nested lookups were run.  (TRACE_APPLY line elided in listing.) */
2297 bool apply (hb_ot_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
2300 const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
2301 const ArrayOf<HBUINT16> &lookahead = StructAfter<ArrayOf<HBUINT16>> (input);
2302 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord>> (lookahead);
2303 return_trace (chain_context_apply_lookup (c,
2304 backtrack.len, backtrack.arrayZ,
2305 input.lenP1, input.arrayZ,
2306 lookahead.len, lookahead.arrayZ, lookup.len,
2307 lookup.arrayZ, lookup_context));
/* Serialization helper used by copy(): writes a length-prefixed array of
 * glyph values from the iterator.
 * NOTE(review): most of this body (original 2313-2316, 2318+) is elided
 * from the listing; only the signature and loop header are visible. */
2310 template<typename Iterator,
2311 hb_requires (hb_is_iterator (Iterator))>
2312 void serialize_array (hb_serialize_context_t *c,
2317 for (const auto g : it)
/* Serialize a remapped copy of this rule.  Each sequence array is
 * rewritten through its own mapping (glyph map or per-sequence class
 * map); lookup indices in the trailing LookupRecords are rewritten
 * through `lookup_map`.  Returns nullptr on serializer failure.
 * (Listing elides some interior lines, e.g. the final return.) */
2325 ChainRule* copy (hb_serialize_context_t *c,
2326 const hb_map_t *lookup_map,
2327 const hb_map_t *backtrack_map,
2328 const hb_map_t *input_map = nullptr,
2329 const hb_map_t *lookahead_map = nullptr) const
2331 TRACE_SERIALIZE (this);
2332 auto *out = c->start_embed (this);
2333 if (unlikely (!out)) return_trace (nullptr);
/* When only backtrack_map is given it serves for all three sequences;
 * input_map/lookahead_map override it per sequence when non-null. */
2335 const hb_map_t *mapping = backtrack_map;
2336 serialize_array (c, backtrack.len, + backtrack.iter ()
2337 | hb_map (mapping));
2339 const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
2340 if (input_map) mapping = input_map;
2341 serialize_array (c, input.lenP1, + input.iter ()
2342 | hb_map (mapping));
2344 const ArrayOf<HBUINT16> &lookahead = StructAfter<ArrayOf<HBUINT16>> (input);
2345 if (lookahead_map) mapping = lookahead_map;
2346 serialize_array (c, lookahead.len, + lookahead.iter ()
2347 | hb_map (mapping));
2349 const ArrayOf<LookupRecord> &lookupRecord = StructAfter<ArrayOf<LookupRecord>> (lookahead);
2350 HBUINT16 lookupCount;
2351 lookupCount = lookupRecord.len;
2352 if (!c->copy (lookupCount)) return_trace (nullptr);
2354 for (unsigned i = 0; i < (unsigned) lookupCount; i++)
2355 if (!c->copy (lookupRecord[i], lookup_map)) return_trace (nullptr);
/* Subset this rule: keep it only if every referenced glyph (or class,
 * when class maps are supplied by ChainContextFormat2) survives the
 * subset plan, then emit a remapped copy.
 * (Listing elides some interior lines, e.g. the branch structure
 * separating the glyph-based and class-based paths.) */
2360 bool subset (hb_subset_context_t *c,
2361 const hb_map_t *lookup_map,
2362 const hb_map_t *backtrack_map = nullptr,
2363 const hb_map_t *input_map = nullptr,
2364 const hb_map_t *lookahead_map = nullptr) const
2366 TRACE_SUBSET (this);
2368 const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
2369 const ArrayOf<HBUINT16> &lookahead = StructAfter<ArrayOf<HBUINT16>> (input);
/* Glyph-keyed path (Format1): every glyph must be in the retained set. */
2373 const hb_set_t &glyphset = *c->plan->glyphset ();
2374 if (!hb_all (backtrack, glyphset) ||
2375 !hb_all (input, glyphset) ||
2376 !hb_all (lookahead, glyphset))
2377 return_trace (false);
2379 copy (c->serializer, lookup_map, c->plan->glyph_map);
/* Class-keyed path (Format2): every class must survive in its map. */
2383 if (!hb_all (backtrack, backtrack_map) ||
2384 !hb_all (input, input_map) ||
2385 !hb_all (lookahead, lookahead_map))
2386 return_trace (false);
2388 copy (c->serializer, lookup_map, backtrack_map, input_map, lookahead_map);
2391 return_trace (true);
/* Validate the four consecutive arrays in order; each StructAfter is
 * only safe once the preceding array has been sanitized (its length
 * determines where the next one starts). */
2394 bool sanitize (hb_sanitize_context_t *c) const
2396 TRACE_SANITIZE (this);
2397 if (!backtrack.sanitize (c)) return_trace (false);
2398 const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
2399 if (!input.sanitize (c)) return_trace (false);
2400 const ArrayOf<HBUINT16> &lookahead = StructAfter<ArrayOf<HBUINT16>> (input);
2401 if (!lookahead.sanitize (c)) return_trace (false);
2402 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord>> (lookahead);
2403 return_trace (lookup.sanitize (c));
/* Data layout: only `backtrack` is addressable directly; the X-suffixed
 * members document the variable-size arrays that follow it in the
 * binary and are reached via StructAfter, never by name. */
2408 backtrack; /* Array of backtracking values
2409 * (to be matched before the input
2411 HeadlessArrayOf<HBUINT16>
2412 inputX; /* Array of input values (start with
2415 lookaheadX; /* Array of lookahead values's (to be
2416 * matched after the input sequence) */
2417 ArrayOf<LookupRecord>
2418 lookupX; /* Array of LookupRecords--in
2421 DEFINE_SIZE_MIN (8);
/* ChainRuleSet: a preference-ordered collection of ChainRules.  Each
 * method below iterates `rule`, resolves the offsets (hb_add (this)),
 * and forwards to the per-rule method of the same name.  The boolean
 * queries (intersects / would_apply / apply) stop at the first rule
 * that answers true; closures visit every rule.
 * (Listing elides the pipeline heads/tails on several methods.) */
2426 bool intersects (const hb_set_t *glyphs, ChainContextClosureLookupContext &lookup_context) const
2430 | hb_map (hb_add (this))
2431 | hb_map ([&] (const ChainRule &_) { return _.intersects (glyphs, lookup_context); })
2435 void closure (hb_closure_context_t *c, ChainContextClosureLookupContext &lookup_context) const
2437 if (unlikely (c->lookup_limit_exceeded ())) return;
2441 | hb_map (hb_add (this))
2442 | hb_apply ([&] (const ChainRule &_) { _.closure (c, lookup_context); })
2446 void closure_lookups (hb_closure_lookups_context_t *c) const
2448 if (unlikely (c->lookup_limit_exceeded ())) return;
2452 | hb_map (hb_add (this))
2453 | hb_apply ([&] (const ChainRule &_) { _.closure_lookups (c); })
2457 void collect_glyphs (hb_collect_glyphs_context_t *c, ChainContextCollectGlyphsLookupContext &lookup_context) const
2461 | hb_map (hb_add (this))
2462 | hb_apply ([&] (const ChainRule &_) { _.collect_glyphs (c, lookup_context); })
2466 bool would_apply (hb_would_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
2470 | hb_map (hb_add (this))
2471 | hb_map ([&] (const ChainRule &_) { return _.would_apply (c, lookup_context); })
2476 bool apply (hb_ot_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
2481 | hb_map (hb_add (this))
2482 | hb_map ([&] (const ChainRule &_) { return _.apply (c, lookup_context); })
/* Subset the rule set: try to subset each rule; a rule that fails is
 * dropped by reverting the serializer to the snapshot taken just
 * before it.  The whole set is reverted (and reported empty) when no
 * rule survives.  (Listing elides some interior lines, e.g. the
 * serialize_subset failure branch and final return.) */
2488 bool subset (hb_subset_context_t *c,
2489 const hb_map_t *lookup_map,
2490 const hb_map_t *backtrack_klass_map = nullptr,
2491 const hb_map_t *input_klass_map = nullptr,
2492 const hb_map_t *lookahead_klass_map = nullptr) const
2494 TRACE_SUBSET (this);
/* Outer snapshot: lets us undo the entire set if nothing survives. */
2496 auto snap = c->serializer->snapshot ();
2497 auto *out = c->serializer->start_embed (*this);
2498 if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
2500 for (const OffsetTo<ChainRule>& _ : rule)
2503 auto *o = out->rule.serialize_append (c->serializer);
2504 if (unlikely (!o)) continue;
/* Inner snapshot: undo just this one rule on failure. */
2506 auto o_snap = c->serializer->snapshot ();
2507 if (!o->serialize_subset (c, _, this,
2509 backtrack_klass_map,
2511 lookahead_klass_map))
2514 c->serializer->revert (o_snap);
2518 bool ret = bool (out->rule);
2519 if (!ret) c->serializer->revert (snap);
/* Validate the offset array of rules (offsets resolved relative to this). */
2524 bool sanitize (hb_sanitize_context_t *c) const
2526 TRACE_SANITIZE (this);
2527 return_trace (rule.sanitize (c, this));
2531 OffsetArrayOf<ChainRule>
2532 rule; /* Array of ChainRule tables
2533 * ordered by preference */
2535 DEFINE_SIZE_ARRAY (2, rule);
/* Chained Contextual lookup, Format 1 (simple glyph contexts):
 * a Coverage table selects the first glyph, and a parallel ruleSet
 * array holds the candidate ChainRuleSets for that glyph.  Matching
 * inside rules is by glyph id, so the lookup-context function tables
 * are populated with null per-sequence data.
 * (Listing elides some interior lines throughout.) */
2538 struct ChainContextFormat1
2540 bool intersects (const hb_set_t *glyphs) const
2542 struct ChainContextClosureLookupContext lookup_context = {
2544 {nullptr, nullptr, nullptr}
/* Only coverage entries present in `glyphs` can start a match. */
2548 + hb_zip (this+coverage, ruleSet)
2549 | hb_filter (*glyphs, hb_first)
2550 | hb_map (hb_second)
2551 | hb_map (hb_add (this))
2552 | hb_map ([&] (const ChainRuleSet &_) { return _.intersects (glyphs, lookup_context); })
2557 void closure (hb_closure_context_t *c) const
2559 struct ChainContextClosureLookupContext lookup_context = {
2561 {nullptr, nullptr, nullptr}
2564 + hb_zip (this+coverage, ruleSet)
2565 | hb_filter (*c->glyphs, hb_first)
2566 | hb_map (hb_second)
2567 | hb_map (hb_add (this))
2568 | hb_apply ([&] (const ChainRuleSet &_) { _.closure (c, lookup_context); })
2572 void closure_lookups (hb_closure_lookups_context_t *c) const
2575 | hb_map (hb_add (this))
2576 | hb_apply ([&] (const ChainRuleSet &_) { _.closure_lookups (c); })
/* GSUB-side no-op: chained contexts carry no device/variation data. */
2580 void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {}
2582 void collect_glyphs (hb_collect_glyphs_context_t *c) const
2584 (this+coverage).collect_coverage (c->input);
2586 struct ChainContextCollectGlyphsLookupContext lookup_context = {
2588 {nullptr, nullptr, nullptr}
2592 | hb_map (hb_add (this))
2593 | hb_apply ([&] (const ChainRuleSet &_) { _.collect_glyphs (c, lookup_context); })
2597 bool would_apply (hb_would_apply_context_t *c) const
2599 const ChainRuleSet &rule_set = this+ruleSet[(this+coverage).get_coverage (c->glyphs[0])];
2600 struct ChainContextApplyLookupContext lookup_context = {
2602 {nullptr, nullptr, nullptr}
2604 return rule_set.would_apply (c, lookup_context);
2607 const Coverage &get_coverage () const { return this+coverage; }
2609 bool apply (hb_ot_apply_context_t *c) const
2612 unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
2613 if (likely (index == NOT_COVERED)) return_trace (false);
2615 const ChainRuleSet &rule_set = this+ruleSet[index];
2616 struct ChainContextApplyLookupContext lookup_context = {
2618 {nullptr, nullptr, nullptr}
2620 return_trace (rule_set.apply (c, lookup_context));
2623 bool subset (hb_subset_context_t *c) const
2625 TRACE_SUBSET (this);
2626 const hb_set_t &glyphset = *c->plan->glyphset ();
2627 const hb_map_t &glyph_map = *c->plan->glyph_map;
2629 auto *out = c->serializer->start_embed (*this);
2630 if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
2631 out->format = format;
2633 const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? c->plan->gsub_lookups : c->plan->gpos_lookups;
/* Keep only covered glyphs whose rule set subsets successfully, then
 * rebuild the coverage from the remapped glyph ids. */
2634 hb_sorted_vector_t<hb_codepoint_t> new_coverage;
2635 + hb_zip (this+coverage, ruleSet)
2636 | hb_filter (glyphset, hb_first)
2637 | hb_filter (subset_offset_array (c, out->ruleSet, this, lookup_map), hb_second)
2639 | hb_map (glyph_map)
2640 | hb_sink (new_coverage)
2643 out->coverage.serialize (c->serializer, out)
2644 .serialize (c->serializer, new_coverage.iter ());
2645 return_trace (bool (new_coverage));
2648 bool sanitize (hb_sanitize_context_t *c) const
2650 TRACE_SANITIZE (this);
2651 return_trace (coverage.sanitize (c, this) && ruleSet.sanitize (c, this));
2655 HBUINT16 format; /* Format identifier--format = 1 */
2657 coverage; /* Offset to Coverage table--from
2658 * beginning of table */
2659 OffsetArrayOf<ChainRuleSet>
2660 ruleSet; /* Array of ChainRuleSet tables
2661 * ordered by Coverage Index */
2663 DEFINE_SIZE_ARRAY (6, ruleSet);
/* Chained Contextual lookup, Format 2 (class-based contexts):
 * Coverage gates the first glyph, then three independent ClassDefs
 * (backtrack/input/lookahead) classify glyphs, and ruleSet is indexed
 * by the *input class* of the first glyph.  The lookup contexts carry
 * the three ClassDef pointers as per-sequence match data.
 * (Listing elides some interior lines throughout.) */
2666 struct ChainContextFormat2
2668 bool intersects (const hb_set_t *glyphs) const
2670 if (!(this+coverage).intersects (glyphs))
2673 const ClassDef &backtrack_class_def = this+backtrackClassDef;
2674 const ClassDef &input_class_def = this+inputClassDef;
2675 const ClassDef &lookahead_class_def = this+lookaheadClassDef;
2677 struct ChainContextClosureLookupContext lookup_context = {
2679 {&backtrack_class_def,
2681 &lookahead_class_def}
/* A rule set at index i is reachable only if some glyph of class i
 * is in the set. */
2685 + hb_enumerate (ruleSet)
2686 | hb_map ([&] (const hb_pair_t<unsigned, const OffsetTo<ChainRuleSet> &> p)
2687 { return input_class_def.intersects_class (glyphs, p.first) &&
2688 (this+p.second).intersects (glyphs, lookup_context); })
2692 void closure (hb_closure_context_t *c) const
2694 if (!(this+coverage).intersects (c->glyphs))
2697 const ClassDef &backtrack_class_def = this+backtrackClassDef;
2698 const ClassDef &input_class_def = this+inputClassDef;
2699 const ClassDef &lookahead_class_def = this+lookaheadClassDef;
2701 struct ChainContextClosureLookupContext lookup_context = {
2703 {&backtrack_class_def,
2705 &lookahead_class_def}
2709 + hb_enumerate (ruleSet)
2710 | hb_filter ([&] (unsigned _)
2711 { return input_class_def.intersects_class (c->glyphs, _); },
2713 | hb_map (hb_second)
2714 | hb_map (hb_add (this))
2715 | hb_apply ([&] (const ChainRuleSet &_) { _.closure (c, lookup_context); })
2719 void closure_lookups (hb_closure_lookups_context_t *c) const
2722 | hb_map (hb_add (this))
2723 | hb_apply ([&] (const ChainRuleSet &_) { _.closure_lookups (c); })
2727 void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {}
2729 void collect_glyphs (hb_collect_glyphs_context_t *c) const
2731 (this+coverage).collect_coverage (c->input);
2733 const ClassDef &backtrack_class_def = this+backtrackClassDef;
2734 const ClassDef &input_class_def = this+inputClassDef;
2735 const ClassDef &lookahead_class_def = this+lookaheadClassDef;
2737 struct ChainContextCollectGlyphsLookupContext lookup_context = {
2739 {&backtrack_class_def,
2741 &lookahead_class_def}
2745 | hb_map (hb_add (this))
2746 | hb_apply ([&] (const ChainRuleSet &_) { _.collect_glyphs (c, lookup_context); })
2750 bool would_apply (hb_would_apply_context_t *c) const
2752 const ClassDef &backtrack_class_def = this+backtrackClassDef;
2753 const ClassDef &input_class_def = this+inputClassDef;
2754 const ClassDef &lookahead_class_def = this+lookaheadClassDef;
2756 unsigned int index = input_class_def.get_class (c->glyphs[0]);
2757 const ChainRuleSet &rule_set = this+ruleSet[index];
2758 struct ChainContextApplyLookupContext lookup_context = {
2760 {&backtrack_class_def,
2762 &lookahead_class_def}
2764 return rule_set.would_apply (c, lookup_context);
2767 const Coverage &get_coverage () const { return this+coverage; }
2769 bool apply (hb_ot_apply_context_t *c) const
/* Coverage only gates entry; dispatch is by input class of cur(). */
2772 unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
2773 if (likely (index == NOT_COVERED)) return_trace (false);
2775 const ClassDef &backtrack_class_def = this+backtrackClassDef;
2776 const ClassDef &input_class_def = this+inputClassDef;
2777 const ClassDef &lookahead_class_def = this+lookaheadClassDef;
2779 index = input_class_def.get_class (c->buffer->cur().codepoint);
2780 const ChainRuleSet &rule_set = this+ruleSet[index];
2781 struct ChainContextApplyLookupContext lookup_context = {
2783 {&backtrack_class_def,
2785 &lookahead_class_def}
2787 return_trace (rule_set.apply (c, lookup_context));
2790 bool subset (hb_subset_context_t *c) const
2792 TRACE_SUBSET (this);
2793 auto *out = c->serializer->start_embed (*this);
2794 if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
2795 out->format = format;
2796 out->coverage.serialize_subset (c, coverage, this);
/* Subsetting each ClassDef also yields old-class -> new-class maps,
 * later used to remap class values inside the rules. */
2798 hb_map_t backtrack_klass_map;
2799 out->backtrackClassDef.serialize_subset (c, backtrackClassDef, this, &backtrack_klass_map);
2801 // subset inputClassDef based on glyphs survived in Coverage subsetting
2802 hb_map_t input_klass_map;
2803 out->inputClassDef.serialize_subset (c, inputClassDef, this, &input_klass_map);
2805 hb_map_t lookahead_klass_map;
2806 out->lookaheadClassDef.serialize_subset (c, lookaheadClassDef, this, &lookahead_klass_map);
2808 unsigned non_zero_index = 0, index = 0;
2810 const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? c->plan->gsub_lookups : c->plan->gpos_lookups;
2811 for (const OffsetTo<ChainRuleSet>& _ : + hb_enumerate (ruleSet)
2812 | hb_filter (input_klass_map, hb_first)
2813 | hb_map (hb_second))
2815 auto *o = out->ruleSet.serialize_append (c->serializer);
2821 if (o->serialize_subset (c, _, this,
2823 &backtrack_klass_map,
2825 &lookahead_klass_map))
2826 non_zero_index = index;
2831 if (!ret) return_trace (ret);
2833 //prune empty trailing ruleSets
2835 while (index > non_zero_index)
2837 out->ruleSet.pop ();
2841 return_trace (bool (out->ruleSet));
2844 bool sanitize (hb_sanitize_context_t *c) const
2846 TRACE_SANITIZE (this);
2847 return_trace (coverage.sanitize (c, this) &&
2848 backtrackClassDef.sanitize (c, this) &&
2849 inputClassDef.sanitize (c, this) &&
2850 lookaheadClassDef.sanitize (c, this) &&
2851 ruleSet.sanitize (c, this));
2855 HBUINT16 format; /* Format identifier--format = 2 */
2857 coverage; /* Offset to Coverage table--from
2858 * beginning of table */
2860 backtrackClassDef; /* Offset to glyph ClassDef table
2861 * containing backtrack sequence
2862 * data--from beginning of table */
2864 inputClassDef; /* Offset to glyph ClassDef
2865 * table containing input sequence
2866 * data--from beginning of table */
2868 lookaheadClassDef; /* Offset to glyph ClassDef table
2869 * containing lookahead sequence
2870 * data--from beginning of table */
2871 OffsetArrayOf<ChainRuleSet>
2872 ruleSet; /* Array of ChainRuleSet tables
2873 * ordered by class */
2875 DEFINE_SIZE_ARRAY (12, ruleSet);
/* Chained Contextual lookup, Format 3 (coverage-based contexts):
 * one rule only; every position in backtrack/input/lookahead is its
 * own Coverage table.  The first input coverage doubles as the entry
 * coverage.  Sequence arrays are passed to the shared helpers as
 * HBUINT16 arrays of coverage offsets (hence the casts), with the
 * input array advanced by 1 because the first element is consumed by
 * the coverage test.  (Listing elides some interior lines throughout.) */
2878 struct ChainContextFormat3
2880 bool intersects (const hb_set_t *glyphs) const
2882 const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage>> (backtrack);
2884 if (!(this+input[0]).intersects (glyphs))
2887 const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage>> (input);
2888 struct ChainContextClosureLookupContext lookup_context = {
2889 {intersects_coverage},
2892 return chain_context_intersects (glyphs,
2893 backtrack.len, (const HBUINT16 *) backtrack.arrayZ,
2894 input.len, (const HBUINT16 *) input.arrayZ + 1,
2895 lookahead.len, (const HBUINT16 *) lookahead.arrayZ,
2899 void closure (hb_closure_context_t *c) const
2901 const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage>> (backtrack);
2903 if (!(this+input[0]).intersects (c->glyphs))
2906 const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage>> (input);
2907 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord>> (lookahead);
2908 struct ChainContextClosureLookupContext lookup_context = {
2909 {intersects_coverage},
2912 chain_context_closure_lookup (c,
2913 backtrack.len, (const HBUINT16 *) backtrack.arrayZ,
2914 input.len, (const HBUINT16 *) input.arrayZ + 1,
2915 lookahead.len, (const HBUINT16 *) lookahead.arrayZ,
2916 lookup.len, lookup.arrayZ,
2920 void closure_lookups (hb_closure_lookups_context_t *c) const
2922 const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage>> (backtrack);
2923 const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage>> (input);
2924 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord>> (lookahead);
2925 recurse_lookups (c, lookup.len, lookup.arrayZ);
2928 void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {}
2930 void collect_glyphs (hb_collect_glyphs_context_t *c) const
2932 const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage>> (backtrack);
2934 (this+input[0]).collect_coverage (c->input);
2936 const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage>> (input);
2937 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord>> (lookahead);
2938 struct ChainContextCollectGlyphsLookupContext lookup_context = {
2942 chain_context_collect_glyphs_lookup (c,
2943 backtrack.len, (const HBUINT16 *) backtrack.arrayZ,
2944 input.len, (const HBUINT16 *) input.arrayZ + 1,
2945 lookahead.len, (const HBUINT16 *) lookahead.arrayZ,
2946 lookup.len, lookup.arrayZ,
2950 bool would_apply (hb_would_apply_context_t *c) const
2952 const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage>> (backtrack);
2953 const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage>> (input);
2954 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord>> (lookahead);
2955 struct ChainContextApplyLookupContext lookup_context = {
2959 return chain_context_would_apply_lookup (c,
2960 backtrack.len, (const HBUINT16 *) backtrack.arrayZ,
2961 input.len, (const HBUINT16 *) input.arrayZ + 1,
2962 lookahead.len, (const HBUINT16 *) lookahead.arrayZ,
2963 lookup.len, lookup.arrayZ, lookup_context);
/* Entry coverage is the first input coverage (no separate member). */
2966 const Coverage &get_coverage () const
2968 const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage>> (backtrack);
2969 return this+input[0];
2972 bool apply (hb_ot_apply_context_t *c) const
2975 const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage>> (backtrack);
2977 unsigned int index = (this+input[0]).get_coverage (c->buffer->cur().codepoint);
2978 if (likely (index == NOT_COVERED)) return_trace (false);
2980 const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage>> (input);
2981 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord>> (lookahead);
2982 struct ChainContextApplyLookupContext lookup_context = {
2986 return_trace (chain_context_apply_lookup (c,
2987 backtrack.len, (const HBUINT16 *) backtrack.arrayZ,
2988 input.len, (const HBUINT16 *) input.arrayZ + 1,
2989 lookahead.len, (const HBUINT16 *) lookahead.arrayZ,
2990 lookup.len, lookup.arrayZ, lookup_context));
/* Subset helper: writes the length prefix, then subsets each coverage
 * offset into the output array. */
2993 template<typename Iterator,
2994 hb_requires (hb_is_iterator (Iterator))>
2995 bool serialize_coverage_offsets (hb_subset_context_t *c, Iterator it, const void* base) const
2997 TRACE_SERIALIZE (this);
2998 auto *out = c->serializer->start_embed<OffsetArrayOf<Coverage>> ();
3000 if (unlikely (!c->serializer->allocate_size<HBUINT16> (HBUINT16::static_size))) return_trace (false);
3003 | hb_apply (subset_offset_array (c, *out, base))
3006 return_trace (out->len);
3009 bool subset (hb_subset_context_t *c) const
3011 TRACE_SUBSET (this);
3013 auto *out = c->serializer->start_embed (this);
3014 if (unlikely (!out)) return_trace (false);
3015 if (unlikely (!c->serializer->embed (this->format))) return_trace (false);
3017 if (!serialize_coverage_offsets (c, backtrack.iter (), this))
3018 return_trace (false);
3020 const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage>> (backtrack);
3021 if (!serialize_coverage_offsets (c, input.iter (), this))
3022 return_trace (false);
3024 const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage>> (input);
3025 if (!serialize_coverage_offsets (c, lookahead.iter (), this))
3026 return_trace (false);
3028 const ArrayOf<LookupRecord> &lookupRecord = StructAfter<ArrayOf<LookupRecord>> (lookahead);
3029 HBUINT16 lookupCount;
3030 lookupCount = lookupRecord.len;
3031 if (!c->serializer->copy (lookupCount)) return_trace (false);
3033 const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? c->plan->gsub_lookups : c->plan->gpos_lookups;
3034 for (unsigned i = 0; i < (unsigned) lookupCount; i++)
3035 if (!c->serializer->copy (lookupRecord[i], lookup_map)) return_trace (false);
3037 return_trace (true);
3040 bool sanitize (hb_sanitize_context_t *c) const
3042 TRACE_SANITIZE (this);
3043 if (!backtrack.sanitize (c, this)) return_trace (false);
3044 const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage>> (backtrack);
3045 if (!input.sanitize (c, this)) return_trace (false);
3046 if (!input.len) return_trace (false); /* To be consistent with Context. */
3047 const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage>> (input);
3048 if (!lookahead.sanitize (c, this)) return_trace (false);
3049 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord>> (lookahead);
3050 return_trace (lookup.sanitize (c));
3054 HBUINT16 format; /* Format identifier--format = 3 */
3055 OffsetArrayOf<Coverage>
3056 backtrack; /* Array of coverage tables
3057 * in backtracking sequence, in glyph
3059 OffsetArrayOf<Coverage>
3060 inputX ; /* Array of coverage
3061 * tables in input sequence, in glyph
3063 OffsetArrayOf<Coverage>
3064 lookaheadX; /* Array of coverage tables
3065 * in lookahead sequence, in glyph
3067 ArrayOf<LookupRecord>
3068 lookupX; /* Array of LookupRecords--in
3071 DEFINE_SIZE_MIN (10);
/* Format dispatcher for the ChainContext union: sanitizes just the
 * format field, then routes to format1/2/3.  (The `switch (u.format)`
 * header and closing lines are elided in this listing.) */
3076 template <typename context_t, typename ...Ts>
3077 typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
3079 TRACE_DISPATCH (this, u.format);
3080 if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
3082 case 1: return_trace (c->dispatch (u.format1, hb_forward<Ts> (ds)...));
3083 case 2: return_trace (c->dispatch (u.format2, hb_forward<Ts> (ds)...));
3084 case 3: return_trace (c->dispatch (u.format3, hb_forward<Ts> (ds)...));
3085 default:return_trace (c->default_return_value ());
/* Overlaid union members: `format` aliases the first field of each. */
3091 HBUINT16 format; /* Format identifier */
3092 ChainContextFormat1 format1;
3093 ChainContextFormat2 format2;
3094 ChainContextFormat3 format3;
/* Extension lookup subtable, format 1: a thin indirection that stores
 * the real lookup type plus a 32-bit offset to the actual subtable,
 * letting lookups live beyond 16-bit offset reach.
 * (Listing elides some interior lines.) */
3099 template <typename T>
3100 struct ExtensionFormat1
3102 unsigned int get_type () const { return extensionLookupType; }
3104 template <typename X>
3105 const X& get_subtable () const
3106 { return this + reinterpret_cast<const LOffsetTo<typename T::SubTable> &> (extensionOffset); }
3108 template <typename context_t, typename ...Ts>
3109 typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
3111 TRACE_DISPATCH (this, format);
3112 if (unlikely (!c->may_dispatch (this, this))) return_trace (c->no_dispatch_return_value ());
3113 return_trace (get_subtable<typename T::SubTable> ().dispatch (c, get_type (), hb_forward<Ts> (ds)...));
3116 void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
3119 /* This is called from may_dispatch() above with hb_sanitize_context_t. */
3120 bool sanitize (hb_sanitize_context_t *c) const
3122 TRACE_SANITIZE (this);
/* Reject nested Extension subtables: prevents unbounded indirection. */
3123 return_trace (c->check_struct (this) &&
3124 extensionLookupType != T::SubTable::Extension);
3128 HBUINT16 format; /* Format identifier. Set to 1. */
3129 HBUINT16 extensionLookupType; /* Lookup type of subtable referenced
3130 * by ExtensionOffset (i.e. the
3131 * extension subtable). */
3132 Offset32 extensionOffset; /* Offset to the extension subtable,
3133 * of lookup type subtable. */
3135 DEFINE_SIZE_STATIC (8);
/* Extension wrapper: switches on `u.format` (only format 1 exists) and
 * forwards to ExtensionFormat1.  NOTE(review): the `struct Extension`
 * header line, switch headers, and closing braces are elided in this
 * listing; the visible lines are a partial view. */
3138 template <typename T>
3141 unsigned int get_type () const
3144 case 1: return u.format1.get_type ();
3148 template <typename X>
3149 const X& get_subtable () const
3152 case 1: return u.format1.template get_subtable<typename T::SubTable> ();
3153 default:return Null (typename T::SubTable);
3157 template <typename context_t, typename ...Ts>
3158 typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
3160 TRACE_DISPATCH (this, u.format);
3161 if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
3163 case 1: return_trace (u.format1.dispatch (c, hb_forward<Ts> (ds)...));
3164 default:return_trace (c->default_return_value ());
3170 HBUINT16 format; /* Format identifier */
3171 ExtensionFormat1<T> format1;
/* Per-lookup acceleration: a set-digest of the lookup's coverage for a
 * fast may_have() rejection, plus a flattened array of its subtables
 * so apply() can skip the dispatch machinery.
 * (Listing elides some interior lines.) */
3180 struct hb_ot_layout_lookup_accelerator_t
3182 template <typename TLookup>
3183 void init (const TLookup &lookup)
3186 lookup.collect_coverage (&digest);
3189 OT::hb_get_subtables_context_t c_get_subtables (subtables);
3190 lookup.dispatch (&c_get_subtables);
3192 void fini () { subtables.fini (); }
/* Probabilistic filter: false means definitely not covered; true may
 * still fail in apply(). */
3194 bool may_have (hb_codepoint_t g) const
3195 { return digest.may_have (g); }
3197 bool apply (hb_ot_apply_context_t *c) const
3199 for (unsigned int i = 0; i < subtables.length; i++)
3200 if (subtables[i].apply (c))
3206 hb_set_digest_t digest;
3207 hb_get_subtables_context_t::array_t subtables;
/* GSUBGPOS header accessors: thin forwarders into the script, feature
 * and lookup lists reached through the table-relative offsets below.
 * (The enclosing `struct GSUBGPOS` header is elided in this listing.) */
3212 bool has_data () const { return version.to_int (); }
3213 unsigned int get_script_count () const
3214 { return (this+scriptList).len; }
3215 const Tag& get_script_tag (unsigned int i) const
3216 { return (this+scriptList).get_tag (i); }
3217 unsigned int get_script_tags (unsigned int start_offset,
3218 unsigned int *script_count /* IN/OUT */,
3219 hb_tag_t *script_tags /* OUT */) const
3220 { return (this+scriptList).get_tags (start_offset, script_count, script_tags); }
3221 const Script& get_script (unsigned int i) const
3222 { return (this+scriptList)[i]; }
3223 bool find_script_index (hb_tag_t tag, unsigned int *index) const
3224 { return (this+scriptList).find_index (tag, index); }
3226 unsigned int get_feature_count () const
3227 { return (this+featureList).len; }
/* NOT_FOUND_INDEX maps to HB_TAG_NONE rather than indexing out of range. */
3228 hb_tag_t get_feature_tag (unsigned int i) const
3229 { return i == Index::NOT_FOUND_INDEX ? HB_TAG_NONE : (this+featureList).get_tag (i); }
3230 unsigned int get_feature_tags (unsigned int start_offset,
3231 unsigned int *feature_count /* IN/OUT */,
3232 hb_tag_t *feature_tags /* OUT */) const
3233 { return (this+featureList).get_tags (start_offset, feature_count, feature_tags); }
3234 const Feature& get_feature (unsigned int i) const
3235 { return (this+featureList)[i]; }
3236 bool find_feature_index (hb_tag_t tag, unsigned int *index) const
3237 { return (this+featureList).find_index (tag, index); }
3239 unsigned int get_lookup_count () const
3240 { return (this+lookupList).len; }
3241 const Lookup& get_lookup (unsigned int i) const
3242 { return (this+lookupList)[i]; }
/* FeatureVariations support (table version >= 1.1 only).
 * (Listing elides some interior lines.) */
/* Find the first variations record matching `coords`; index is reset to
 * NOT_FOUND_INDEX first so it is well-defined on failure. */
3244 bool find_variations_index (const int *coords, unsigned int num_coords,
3245 unsigned int *index) const
3248 *index = FeatureVariations::NOT_FOUND_INDEX;
/* Pre-1.1 tables have no featureVars; use the null object instead. */
3251 return (version.to_int () >= 0x00010001u ? this+featureVars : Null (FeatureVariations))
3252 .find_index (coords, num_coords, index);
/* Return the variation-substituted Feature when one exists, otherwise
 * the default Feature at feature_index. */
3254 const Feature& get_feature_variation (unsigned int feature_index,
3255 unsigned int variations_index) const
3258 if (FeatureVariations::NOT_FOUND_INDEX != variations_index &&
3259 version.to_int () >= 0x00010001u)
3261 const Feature *feature = (this+featureVars).find_substitute (variations_index,
3267 return get_feature (feature_index);
/* Accumulate lookups referenced via feature variations for the given
 * feature set. */
3270 void feature_variation_collect_lookups (const hb_set_t *feature_indexes,
3271 hb_set_t *lookup_indexes /* OUT */) const
3274 if (version.to_int () >= 0x00010001u)
3275 (this+featureVars).collect_lookups (feature_indexes, lookup_indexes);
/* Subset the whole GSUB/GPOS table: lookups first (their survival
 * determines feature pruning), then features, scripts, and finally
 * featureVars; if featureVars subsets to nothing the version is
 * downgraded to 1.0.  The reinterpret_casts select subset-aware
 * wrapper types over the same binary layout.
 * (Listing elides some interior lines.) */
3279 template <typename TLookup>
3280 bool subset (hb_subset_layout_context_t *c) const
3282 TRACE_SUBSET (this);
3283 auto *out = c->subset_context->serializer->embed (*this);
3284 if (unlikely (!out)) return_trace (false);
3286 typedef LookupOffsetList<TLookup> TLookupList;
3287 reinterpret_cast<OffsetTo<TLookupList> &> (out->lookupList)
3288 .serialize_subset (c->subset_context,
3289 reinterpret_cast<const OffsetTo<TLookupList> &> (lookupList),
3293 reinterpret_cast<OffsetTo<RecordListOfFeature> &> (out->featureList)
3294 .serialize_subset (c->subset_context,
3295 reinterpret_cast<const OffsetTo<RecordListOfFeature> &> (featureList),
3299 out->scriptList.serialize_subset (c->subset_context,
3305 if (version.to_int () >= 0x00010001u)
3307 bool ret = out->featureVars.serialize_subset (c->subset_context, featureVars, this, c);
/* Dropped featureVars => downgrade to version 1.0 so the shorter
 * header is valid. */
3310 out->version.major = 1;
3311 out->version.minor = 0;
3316 return_trace (true);
/* Collect the features worth keeping: any feature with featureParams,
 * or one that references a surviving lookup; then extend through
 * feature variations (v1.1+).  Capped at HB_MAX_FEATURES. */
3319 void closure_features (const hb_map_t *lookup_indexes, /* IN */
3320 hb_set_t *feature_indexes /* OUT */) const
3322 unsigned int feature_count = hb_min (get_feature_count (), (unsigned) HB_MAX_FEATURES);
3323 for (unsigned i = 0; i < feature_count; i++)
3325 const Feature& f = get_feature (i);
3326 if ((!f.featureParams.is_null ()) || f.intersects_lookup_indexes (lookup_indexes))
3327 feature_indexes->add (i);
3330 if (version.to_int () >= 0x00010001u)
3331 (this+featureVars).closure_features (lookup_indexes, feature_indexes);
/* Header size: featureVars offset exists only from version 1.1 on.
 * (Return expression partially elided in this listing.) */
3335 unsigned int get_size () const
3338 (version.to_int () >= 0x00010001u ? featureVars.static_size : 0);
3341 template <typename TLookup>
3342 bool sanitize (hb_sanitize_context_t *c) const
3344 TRACE_SANITIZE (this);
3345 typedef OffsetListOf<TLookup> TLookupList;
3346 if (unlikely (!(version.sanitize (c) &&
3347 likely (version.major == 1) &&
3348 scriptList.sanitize (c, this) &&
3349 featureList.sanitize (c, this) &&
3350 reinterpret_cast<const OffsetTo<TLookupList> &> (lookupList).sanitize (c, this))))
3351 return_trace (false);
3354 if (unlikely (!(version.to_int () < 0x00010001u || featureVars.sanitize (c, this))))
3355 return_trace (false);
3358 return_trace (true);
3361 template <typename T>
3362 struct accelerator_t
3364 void init (hb_face_t *face)
3366 this->table = hb_sanitize_context_t ().reference_table<T> (face);
3367 if (unlikely (this->table->is_blacklisted (this->table.get_blob (), face)))
3369 hb_blob_destroy (this->table.get_blob ());
3370 this->table = hb_blob_get_empty ();
3373 this->lookup_count = table->get_lookup_count ();
3375 this->accels = (hb_ot_layout_lookup_accelerator_t *) calloc (this->lookup_count, sizeof (hb_ot_layout_lookup_accelerator_t));
3376 if (unlikely (!this->accels))
3377 this->lookup_count = 0;
3379 for (unsigned int i = 0; i < this->lookup_count; i++)
3380 this->accels[i].init (table->get_lookup (i));
3385 for (unsigned int i = 0; i < this->lookup_count; i++)
3386 this->accels[i].fini ();
3387 free (this->accels);
3388 this->table.destroy ();
3391 hb_blob_ptr_t<T> table;
3392 unsigned int lookup_count;
3393 hb_ot_layout_lookup_accelerator_t *accels;
  FixedVersion<>version;	/* Version of the GSUB/GPOS table--initially
				 * set to 0x00010000u */
  OffsetTo<ScriptList>
		scriptList;	/* ScriptList table */
  OffsetTo<FeatureList>
		featureList;	/* FeatureList table */
  OffsetTo<LookupList>
		lookupList;	/* LookupList table */
  LOffsetTo<FeatureVariations>
		featureVars;	/* Offset to FeatureVariations
				 * table--from beginning of table
				 * (may be NULL).  Introduced
				 * in version 0x00010001. */
  /* 10 = version (4) + three 16-bit offsets; featureVars is optional,
   * hence MIN. */
  DEFINE_SIZE_MIN (10);
3415 } /* namespace OT */
3418 #endif /* HB_OT_LAYOUT_GSUBGPOS_HH */