2 * Copyright © 2007,2008,2009,2010 Red Hat, Inc.
3 * Copyright © 2010,2012 Google, Inc.
5 * This is part of HarfBuzz, a text shaping library.
7 * Permission is hereby granted, without written agreement and without
8 * license or royalty fees, to use, copy, modify, and distribute this
9 * software and its documentation for any purpose, provided that the
10 * above copyright notice and the following two paragraphs appear in
11 * all copies of this software.
13 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
14 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
15 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
16 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
19 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
20 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
21 * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
22 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
23 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
25 * Red Hat Author(s): Behdad Esfahbod
26 * Google Author(s): Behdad Esfahbod
29 #ifndef HB_OT_LAYOUT_GSUBGPOS_HH
30 #define HB_OT_LAYOUT_GSUBGPOS_HH
33 #include "hb-buffer.hh"
36 #include "hb-ot-map.hh"
37 #include "hb-ot-layout-common.hh"
38 #include "hb-ot-layout-gdef-table.hh"
/* Dispatch context asking: does any subtable intersect the given glyph set?
 * dispatch() forwards to the subtable's intersects(); iteration stops at the
 * first `true` (stop_sublookup_iteration returns r).
 * NOTE(review): listing is a partial extract — struct braces and part of the
 * constructor are missing; code lines kept byte-identical. */
44 struct hb_intersects_context_t :
45 hb_dispatch_context_t<hb_intersects_context_t, bool, 0>
47 const char *get_name () { return "INTERSECTS"; }
49 return_t dispatch (const T &obj) { return obj.intersects (this->glyphs); }
/* No intersection found anywhere => false. */
50 static return_t default_return_value () { return false; }
51 bool stop_sublookup_iteration (return_t r) const { return r; }
/* Glyph set being tested; not owned by this context. */
53 const hb_set_t *glyphs;
54 unsigned int debug_depth;
56 hb_intersects_context_t (const hb_set_t *glyphs_) :
/* Glyph-closure context: repeatedly applies lookups' closure() to grow the
 * `glyphs` set with everything reachable by substitution.  Recursion into
 * nested lookups is bounded both by nesting depth (nesting_level_left) and a
 * total lookup-visit budget (HB_MAX_LOOKUP_INDICES) to defend against
 * malicious/cyclic fonts.
 * NOTE(review): partial extract — braces, `template` headers, some member
 * declarations and initializer-list lines are missing; code byte-identical. */
61 struct hb_closure_context_t :
62 hb_dispatch_context_t<hb_closure_context_t, hb_empty_t, 0>
64 const char *get_name () { return "CLOSURE"; }
65 typedef return_t (*recurse_func_t) (hb_closure_context_t *c, unsigned int lookup_index);
67 return_t dispatch (const T &obj) { obj.closure (this); return hb_empty_t (); }
68 static return_t default_return_value () { return hb_empty_t (); }
69 void recurse (unsigned int lookup_index)
/* Guard: bail when nesting budget exhausted or no recurse hook installed. */
71 if (unlikely (nesting_level_left == 0 || !recurse_func))
75 recurse_func (this, lookup_index);
79 bool lookup_limit_exceeded ()
80 { return lookup_count > HB_MAX_LOOKUP_INDICES; }
82 bool should_visit_lookup (unsigned int lookup_index)
/* Global visit budget; post-increment counts every query. */
84 if (lookup_count++ > HB_MAX_LOOKUP_INDICES)
87 if (is_lookup_done (lookup_index))
/* Record the glyph-set population at visit time; a later growth of the
 * set invalidates this "done" mark (see is_lookup_done). */
90 done_lookups->set (lookup_index, glyphs->get_population ());
94 bool is_lookup_done (unsigned int lookup_index)
96 /* Have we visited this lookup with the current set of glyphs? */
97 return done_lookups->get (lookup_index) == glyphs->get_population ();
103 recurse_func_t recurse_func;
104 unsigned int nesting_level_left;
105 unsigned int debug_depth;
107 hb_closure_context_t (hb_face_t *face_,
109 hb_map_t *done_lookups_,
110 unsigned int nesting_level_left_ = HB_MAX_NESTING_LEVEL) :
113 recurse_func (nullptr),
114 nesting_level_left (nesting_level_left_),
116 done_lookups (done_lookups_),
/* Destructor flushes pending `output` glyphs into `glyphs`. */
120 ~hb_closure_context_t () { flush (); }
122 void set_recurse_func (recurse_func_t func) { recurse_func = func; }
/* flush(): clamp output to valid GIDs, merge into glyphs, then clear. */
126 hb_set_del_range (output, face->get_num_glyphs (), hb_set_get_max (output)); /* Remove invalid glyphs. */
127 hb_set_union (glyphs, output);
128 hb_set_clear (output);
/* Map: lookup index -> glyph-set population when last closed. */
132 hb_map_t *done_lookups;
133 unsigned int lookup_count;
/* Lookup-closure context: computes the set of lookups reachable (directly or
 * via contextual recursion) from a seed set, for subsetting.  Tracks visited
 * and inactive lookups in caller-owned sets; bounded by nesting depth and a
 * total visit budget like hb_closure_context_t.
 * NOTE(review): partial extract — braces/initializer fragments missing;
 * code lines byte-identical. */
136 struct hb_closure_lookups_context_t :
137 hb_dispatch_context_t<hb_closure_lookups_context_t, hb_empty_t, 0>
139 const char *get_name () { return "CLOSURE_LOOKUPS"; }
140 typedef return_t (*recurse_func_t) (hb_closure_lookups_context_t *c, unsigned lookup_index);
141 template <typename T>
142 return_t dispatch (const T &obj) { obj.closure_lookups (this); return hb_empty_t (); }
143 static return_t default_return_value () { return hb_empty_t (); }
144 void recurse (unsigned lookup_index)
146 if (unlikely (nesting_level_left == 0 || !recurse_func))
149 /* Return if new lookup was recursed to before. */
150 if (is_lookup_visited (lookup_index))
/* Mark before descending so cycles terminate. */
153 set_lookup_visited (lookup_index);
154 nesting_level_left--;
155 recurse_func (this, lookup_index);
156 nesting_level_left++;
159 void set_lookup_visited (unsigned lookup_index)
160 { visited_lookups->add (lookup_index); }
162 void set_lookup_inactive (unsigned lookup_index)
163 { inactive_lookups->add (lookup_index); }
165 bool lookup_limit_exceeded ()
166 { return lookup_count > HB_MAX_LOOKUP_INDICES; }
168 bool is_lookup_visited (unsigned lookup_index)
/* Over-budget lookups are reported as "visited" so traversal stops. */
170 if (lookup_count++ > HB_MAX_LOOKUP_INDICES)
173 return visited_lookups->has (lookup_index);
177 const hb_set_t *glyphs;
178 recurse_func_t recurse_func;
179 unsigned int nesting_level_left;
180 unsigned int debug_depth;
182 hb_closure_lookups_context_t (hb_face_t *face_,
183 const hb_set_t *glyphs_,
184 hb_set_t *visited_lookups_,
185 hb_set_t *inactive_lookups_,
186 unsigned nesting_level_left_ = HB_MAX_NESTING_LEVEL) :
189 recurse_func (nullptr),
190 nesting_level_left (nesting_level_left_),
192 visited_lookups (visited_lookups_),
193 inactive_lookups (inactive_lookups_),
196 void set_recurse_func (recurse_func_t func) { recurse_func = func; }
/* Caller-owned result sets; not freed here. */
199 hb_set_t *visited_lookups;
200 hb_set_t *inactive_lookups;
201 unsigned int lookup_count;
/* "Would this lookup apply?" context: tests a fixed codepoint sequence
 * (no buffer) against subtables via would_apply(); stops at first match.
 * NOTE(review): partial extract — some members (e.g. `len`, `zero_context`
 * declarations) and constructor lines are missing; code byte-identical. */
204 struct hb_would_apply_context_t :
205 hb_dispatch_context_t<hb_would_apply_context_t, bool, 0>
207 const char *get_name () { return "WOULD_APPLY"; }
208 template <typename T>
209 return_t dispatch (const T &obj) { return obj.would_apply (this); }
210 static return_t default_return_value () { return false; }
211 bool stop_sublookup_iteration (return_t r) const { return r; }
/* Caller-provided codepoint array to test (length presumably stored in a
 * member not visible in this extract — confirm against full header). */
214 const hb_codepoint_t *glyphs;
217 unsigned int debug_depth;
219 hb_would_apply_context_t (hb_face_t *face_,
220 const hb_codepoint_t *glyphs_,
222 bool zero_context_) :
226 zero_context (zero_context_),
/* Glyph-collection context: gathers the before/input/after/output glyph sets
 * a lookup can touch.  Null output arguments are replaced with the shared
 * empty set so collectors can write unconditionally.
 * NOTE(review): partial extract — braces, the set-restore lines after
 * recursion, and some member declarations are missing; code byte-identical. */
231 struct hb_collect_glyphs_context_t :
232 hb_dispatch_context_t<hb_collect_glyphs_context_t, hb_empty_t, 0>
234 const char *get_name () { return "COLLECT_GLYPHS"; }
235 typedef return_t (*recurse_func_t) (hb_collect_glyphs_context_t *c, unsigned int lookup_index);
236 template <typename T>
237 return_t dispatch (const T &obj) { obj.collect_glyphs (this); return hb_empty_t (); }
238 static return_t default_return_value () { return hb_empty_t (); }
239 void recurse (unsigned int lookup_index)
241 if (unlikely (nesting_level_left == 0 || !recurse_func))
244 /* Note that GPOS sets recurse_func to nullptr already, so it doesn't get
245 * past the previous check. For GSUB, we only want to collect the output
246 * glyphs in the recursion. If output is not requested, we can go home now.
248 * Note further, that the above is not exactly correct. A recursed lookup
249 * is allowed to match input that is not matched in the context, but that's
250 * not how most fonts are built. It's possible to relax that and recurse
251 * with all sets here if it proves to be an issue.
254 if (output == hb_set_get_empty ())
257 /* Return if new lookup was recursed to before. */
258 if (recursed_lookups->has (lookup_index))
/* Temporarily divert before/input/after to the empty set: only output
 * glyphs matter inside the recursion (restored after — lines not shown
 * in this extract). */
261 hb_set_t *old_before = before;
262 hb_set_t *old_input = input;
263 hb_set_t *old_after = after;
264 before = input = after = hb_set_get_empty ();
266 nesting_level_left--;
267 recurse_func (this, lookup_index);
268 nesting_level_left++;
274 recursed_lookups->add (lookup_index);
282 recurse_func_t recurse_func;
/* Owned set (created in ctor, destroyed in dtor) of already-recursed lookups. */
283 hb_set_t *recursed_lookups;
284 unsigned int nesting_level_left;
285 unsigned int debug_depth;
287 hb_collect_glyphs_context_t (hb_face_t *face_,
288 hb_set_t *glyphs_before, /* OUT. May be NULL */
289 hb_set_t *glyphs_input, /* OUT. May be NULL */
290 hb_set_t *glyphs_after, /* OUT. May be NULL */
291 hb_set_t *glyphs_output, /* OUT. May be NULL */
292 unsigned int nesting_level_left_ = HB_MAX_NESTING_LEVEL) :
294 before (glyphs_before ? glyphs_before : hb_set_get_empty ()),
295 input (glyphs_input ? glyphs_input : hb_set_get_empty ()),
296 after (glyphs_after ? glyphs_after : hb_set_get_empty ()),
297 output (glyphs_output ? glyphs_output : hb_set_get_empty ()),
298 recurse_func (nullptr),
299 recursed_lookups (hb_set_create ()),
300 nesting_level_left (nesting_level_left_),
302 ~hb_collect_glyphs_context_t () { hb_set_destroy (recursed_lookups); }
304 void set_recurse_func (recurse_func_t func) { recurse_func = func; }
/* Coverage-collection context: dispatch() returns each subtable's Coverage
 * by reference; stop_sublookup_iteration() folds it into `set` as iteration
 * proceeds (and, per the dispatch contract, its return decides whether to
 * stop — the return statement is not visible in this extract).
 * NOTE(review): partial extract; code lines byte-identical. */
309 template <typename set_t>
310 struct hb_collect_coverage_context_t :
311 hb_dispatch_context_t<hb_collect_coverage_context_t<set_t>, const Coverage &, HB_DEBUG_GET_COVERAGE>
313 const char *get_name () { return "GET_COVERAGE"; }
314 typedef const Coverage &return_t;
315 template <typename T>
316 return_t dispatch (const T &obj) { return obj.get_coverage (); }
/* Null Coverage sentinel when a subtable has none. */
317 static return_t default_return_value () { return Null (Coverage); }
318 bool stop_sublookup_iteration (return_t r) const
320 r.collect_coverage (set);
324 hb_collect_coverage_context_t (set_t *set_) :
329 unsigned int debug_depth;
/* The main shaping-time context: applies GSUB/GPOS lookups to a buffer.
 * Contains two nested helpers:
 *   - matcher_t: per-glyph predicate combining lookup mask, syllable,
 *     lookup_props (GDEF class filtering) and an optional match_func over
 *     subtable data;
 *   - skipping_iterator_t: walks the buffer forward/backward while skipping
 *     ignorable glyphs per the matcher.
 * NOTE(review): this extract is heavily gapped — enum bodies (MATCH_*,
 * SKIP_*), several member declarations, braces and parts of the constructor
 * initializer list are missing.  Code lines kept byte-identical. */
333 struct hb_ot_apply_context_t :
334 hb_dispatch_context_t<hb_ot_apply_context_t, bool, HB_DEBUG_APPLY>
343 #define arg1(arg) (arg) /* Remove the macro to see why it's needed! */
/* (matcher_t constructor tail — earlier initializers not in extract.) */
346 match_func (nullptr),
347 match_data (nullptr) {}
349 typedef bool (*match_func_t) (hb_codepoint_t glyph_id, const HBUINT16 &value, const void *data);
351 void set_ignore_zwnj (bool ignore_zwnj_) { ignore_zwnj = ignore_zwnj_; }
352 void set_ignore_zwj (bool ignore_zwj_) { ignore_zwj = ignore_zwj_; }
353 void set_lookup_props (unsigned int lookup_props_) { lookup_props = lookup_props_; }
354 void set_mask (hb_mask_t mask_) { mask = mask_; }
355 void set_syllable (uint8_t syllable_) { syllable = syllable_; }
356 void set_match_func (match_func_t match_func_,
357 const void *match_data_)
358 { match_func = match_func_; match_data = match_data_; }
/* may_match: NO if mask/syllable filter rejects; otherwise YES/NO from
 * match_func when set (MAYBE path when unset is in lines not shown). */
366 may_match_t may_match (const hb_glyph_info_t &info,
367 const HBUINT16 *glyph_data) const
369 if (!(info.mask & mask) ||
370 (syllable && syllable != info.syllable ()))
374 return match_func (info.codepoint, *glyph_data, match_data) ? MATCH_YES : MATCH_NO;
/* may_skip: SKIP_YES when lookup_props reject the glyph; default-ignorable
 * ZWNJ/ZWJ handling below can also force skipping (PERHAPS branch lines
 * not shown in extract). */
385 may_skip_t may_skip (const hb_ot_apply_context_t *c,
386 const hb_glyph_info_t &info) const
388 if (!c->check_glyph_property (&info, lookup_props))
391 if (unlikely (_hb_glyph_info_is_default_ignorable_and_not_hidden (&info) &&
392 (ignore_zwnj || !_hb_glyph_info_is_zwnj (&info)) &&
393 (ignore_zwj || !_hb_glyph_info_is_zwj (&info))))
400 unsigned int lookup_props;
405 match_func_t match_func;
406 const void *match_data;
409 struct skipping_iterator_t
410 void init (hb_ot_apply_context_t *c_, bool context_match = false)
414 match_glyph_data = nullptr;
415 matcher.set_match_func (nullptr, nullptr);
416 matcher.set_lookup_props (c->lookup_props);
417 /* Ignore ZWNJ if we are matching GPOS, or matching GSUB context and asked to. */
418 matcher.set_ignore_zwnj (c->table_index == 1 || (context_match && c->auto_zwnj));
419 /* Ignore ZWJ if we are matching context, or asked to. */
420 matcher.set_ignore_zwj (context_match || c->auto_zwj);
421 matcher.set_mask (context_match ? -1 : c->lookup_mask);
423 void set_lookup_props (unsigned int lookup_props)
425 matcher.set_lookup_props (lookup_props);
427 void set_match_func (matcher_t::match_func_t match_func_,
428 const void *match_data_,
429 const HBUINT16 glyph_data[])
431 matcher.set_match_func (match_func_, match_data_);
432 match_glyph_data = glyph_data;
435 void reset (unsigned int start_index_,
436 unsigned int num_items_)
439 num_items = num_items_;
440 end = c->buffer->len;
/* Syllable filter only applies when starting at the current position. */
441 matcher.set_syllable (start_index_ == c->buffer->idx ? c->buffer->cur().syllable () : 0);
/* reject(): step the glyph-data cursor back so the rejected slot is retried. */
447 if (match_glyph_data) match_glyph_data--;
450 matcher_t::may_skip_t
451 may_skip (const hb_glyph_info_t &info) const
452 { return matcher.may_skip (c, info); }
/* next(): advance forward to the next matching, non-skipped glyph. */
456 assert (num_items > 0);
457 while (idx + num_items < end)
460 const hb_glyph_info_t &info = c->buffer->info[idx];
462 matcher_t::may_skip_t skip = matcher.may_skip (c, info);
463 if (unlikely (skip == matcher_t::SKIP_YES))
466 matcher_t::may_match_t match = matcher.may_match (info, match_glyph_data);
467 if (match == matcher_t::MATCH_YES ||
468 (match == matcher_t::MATCH_MAYBE &&
469 skip == matcher_t::SKIP_NO))
472 if (match_glyph_data) match_glyph_data++;
/* Non-skippable glyph that failed to match => overall failure. */
476 if (skip == matcher_t::SKIP_NO)
/* prev(): mirror of next(), walking the out-buffer backwards. */
483 assert (num_items > 0);
484 while (idx > num_items - 1)
487 const hb_glyph_info_t &info = c->buffer->out_info[idx];
489 matcher_t::may_skip_t skip = matcher.may_skip (c, info);
490 if (unlikely (skip == matcher_t::SKIP_YES))
493 matcher_t::may_match_t match = matcher.may_match (info, match_glyph_data);
494 if (match == matcher_t::MATCH_YES ||
495 (match == matcher_t::MATCH_MAYBE &&
496 skip == matcher_t::SKIP_NO))
499 if (match_glyph_data) match_glyph_data++;
503 if (skip == matcher_t::SKIP_NO)
511 hb_ot_apply_context_t *c;
513 const HBUINT16 *match_glyph_data;
515 unsigned int num_items;
/* --- hb_ot_apply_context_t proper resumes here. --- */
520 const char *get_name () { return "APPLY"; }
521 typedef return_t (*recurse_func_t) (hb_ot_apply_context_t *c, unsigned int lookup_index);
522 template <typename T>
523 return_t dispatch (const T &obj) { return obj.apply (this); }
524 static return_t default_return_value () { return false; }
525 bool stop_sublookup_iteration (return_t r) const { return r; }
/* recurse: apply a nested lookup; bounded by nesting depth AND a global
 * op budget (buffer->max_ops) to tame adversarial fonts. */
526 return_t recurse (unsigned int sub_lookup_index)
528 if (unlikely (nesting_level_left == 0 || !recurse_func || buffer->max_ops-- <= 0))
529 return default_return_value ();
531 nesting_level_left--;
532 bool ret = recurse_func (this, sub_lookup_index);
533 nesting_level_left++;
537 skipping_iterator_t iter_input, iter_context;
542 recurse_func_t recurse_func;
544 const VariationStore &var_store;
546 hb_direction_t direction;
547 hb_mask_t lookup_mask;
548 unsigned int table_index; /* GSUB/GPOS */
549 unsigned int lookup_index;
550 unsigned int lookup_props;
551 unsigned int nesting_level_left;
552 unsigned int debug_depth;
554 bool has_glyph_classes;
/* State for the 'rand' feature PRNG (see random_number). */
559 uint32_t random_state;
562 signed last_base = -1; // GPOS uses
563 unsigned last_base_until = 0; // GPOS uses
565 hb_ot_apply_context_t (unsigned int table_index_,
567 hb_buffer_t *buffer_) :
568 iter_input (), iter_context (),
569 font (font_), face (font->face), buffer (buffer_),
570 recurse_func (nullptr),
572 #ifndef HB_NO_OT_LAYOUT
573 *face->table.GDEF->table
578 var_store (gdef.get_var_store ()),
579 direction (buffer_->props.direction),
581 table_index (table_index_),
582 lookup_index ((unsigned int) -1),
584 nesting_level_left (HB_MAX_NESTING_LEVEL),
586 has_glyph_classes (gdef.has_glyph_classes ()),
590 random_state (1) { init_iters (); }
/* init_iters(): re-sync both iterators after any config change. */
594 iter_input.init (this, false);
595 iter_context.init (this, true);
/* Mask change invalidates GPOS base cache; reset it here. */
598 void set_lookup_mask (hb_mask_t mask) { lookup_mask = mask; last_base = -1; last_base_until = 0; init_iters (); }
599 void set_auto_zwj (bool auto_zwj_) { auto_zwj = auto_zwj_; init_iters (); }
600 void set_auto_zwnj (bool auto_zwnj_) { auto_zwnj = auto_zwnj_; init_iters (); }
601 void set_random (bool random_) { random = random_; }
602 void set_recurse_func (recurse_func_t func) { recurse_func = func; }
603 void set_lookup_index (unsigned int lookup_index_) { lookup_index = lookup_index_; }
604 void set_lookup_props (unsigned int lookup_props_) { lookup_props = lookup_props_; init_iters (); }
606 uint32_t random_number ()
608 /* http://www.cplusplus.com/reference/random/minstd_rand/ */
609 random_state = random_state * 48271 % 2147483647;
/* match_properties_mark: decide whether a MARK glyph passes lookup_props. */
613 bool match_properties_mark (hb_codepoint_t glyph,
614 unsigned int glyph_props,
615 unsigned int match_props) const
617 /* If using mark filtering sets, the high short of
618 * match_props has the set index.
620 if (match_props & LookupFlag::UseMarkFilteringSet)
621 return gdef.mark_set_covers (match_props >> 16, glyph);
623 /* The second byte of match_props has the meaning
624 * "ignore marks of attachment type different than
625 * the attachment type specified."
627 if (match_props & LookupFlag::MarkAttachmentType)
628 return (match_props & LookupFlag::MarkAttachmentType) == (glyph_props & LookupFlag::MarkAttachmentType);
633 bool check_glyph_property (const hb_glyph_info_t *info,
634 unsigned int match_props) const
636 hb_codepoint_t glyph = info->codepoint;
637 unsigned int glyph_props = _hb_glyph_info_get_glyph_props (info);
639 /* Not covered, if, for example, glyph class is ligature and
640 * match_props includes LookupFlags::IgnoreLigatures
642 if (glyph_props & match_props & LookupFlag::IgnoreFlags)
645 if (unlikely (glyph_props & HB_OT_LAYOUT_GLYPH_PROPS_MARK))
646 return match_properties_mark (glyph, glyph_props, match_props);
/* _set_glyph_class: update the current glyph's GDEF-ish props after a
 * substitution (ligature/component flags per the booleans). */
651 void _set_glyph_class (hb_codepoint_t glyph_index,
652 unsigned int class_guess = 0,
653 bool ligature = false,
654 bool component = false) const
656 unsigned int props = _hb_glyph_info_get_glyph_props (&buffer->cur());
658 props |= HB_OT_LAYOUT_GLYPH_PROPS_SUBSTITUTED;
661 props |= HB_OT_LAYOUT_GLYPH_PROPS_LIGATED;
662 /* In the only place that the MULTIPLIED bit is used, Uniscribe
663 * seems to only care about the "last" transformation between
664 * Ligature and Multiple substitutions. Ie. if you ligate, expand,
665 * and ligate again, it forgives the multiplication and acts as
666 * if only ligation happened. As such, clear MULTIPLIED bit.
668 props &= ~HB_OT_LAYOUT_GLYPH_PROPS_MULTIPLIED;
671 props |= HB_OT_LAYOUT_GLYPH_PROPS_MULTIPLIED;
/* Real GDEF class wins over the caller's guess. */
673 if (likely (has_glyph_classes))
674 props = (props & ~HB_OT_LAYOUT_GLYPH_PROPS_CLASS_MASK) | gdef.get_glyph_props (glyph_index);
675 else if (class_guess)
676 props = (props & ~HB_OT_LAYOUT_GLYPH_PROPS_CLASS_MASK) | class_guess;
678 _hb_glyph_info_set_glyph_props (&buffer->cur(), props);
/* The four public mutators below pair _set_glyph_class with the matching
 * buffer operation (advance vs. in-place vs. output-only). */
681 void replace_glyph (hb_codepoint_t glyph_index) const
683 _set_glyph_class (glyph_index);
684 buffer->replace_glyph (glyph_index);
686 void replace_glyph_inplace (hb_codepoint_t glyph_index) const
688 _set_glyph_class (glyph_index);
689 buffer->cur().codepoint = glyph_index;
691 void replace_glyph_with_ligature (hb_codepoint_t glyph_index,
692 unsigned int class_guess) const
694 _set_glyph_class (glyph_index, class_guess, true);
695 buffer->replace_glyph (glyph_index);
697 void output_glyph_for_component (hb_codepoint_t glyph_index,
698 unsigned int class_guess) const
700 _set_glyph_class (glyph_index, class_guess, false, true);
701 buffer->output_glyph (glyph_index);
/* Flattens a lookup's subtables into an array of type-erased appliers.
 * Each hb_applicable_t caches a coverage digest so apply() can cheaply
 * reject glyphs before the virtual-free indirect call.
 * NOTE(review): partial extract — braces and some member lines missing;
 * code byte-identical. */
706 struct hb_get_subtables_context_t :
707 hb_dispatch_context_t<hb_get_subtables_context_t, hb_empty_t, HB_DEBUG_APPLY>
709 template <typename Type>
/* Monomorphic trampoline instantiated per subtable type; stored as a
 * plain function pointer in hb_applicable_t. */
710 static inline bool apply_to (const void *obj, OT::hb_ot_apply_context_t *c)
712 const Type *typed_obj = (const Type *) obj;
713 return typed_obj->apply (c);
716 typedef bool (*hb_apply_func_t) (const void *obj, OT::hb_ot_apply_context_t *c);
718 struct hb_applicable_t
720 template <typename T>
721 void init (const T &obj_, hb_apply_func_t apply_func_)
724 apply_func = apply_func_;
/* Pre-compute the coverage digest for the fast-reject below. */
726 obj_.get_coverage ().collect_coverage (&digest);
729 bool apply (OT::hb_ot_apply_context_t *c) const
/* Digest check first: may_have() false-positives are fine, misses are
 * guaranteed absent, so apply_func is skipped for most glyphs. */
731 return digest.may_have (c->buffer->cur().codepoint) && apply_func (obj, c);
736 hb_apply_func_t apply_func;
737 hb_set_digest_t digest;
740 typedef hb_vector_t<hb_applicable_t> array_t;
742 /* Dispatch interface. */
743 const char *get_name () { return "GET_SUBTABLES"; }
744 template <typename T>
745 return_t dispatch (const T &obj)
747 hb_applicable_t *entry = array.push();
748 entry->init (obj, apply_to<T>);
749 return hb_empty_t ();
751 static return_t default_return_value () { return hb_empty_t (); }
753 hb_get_subtables_context_t (array_t &array_) :
758 unsigned int debug_depth;
/* Function-pointer vocabularies for the shared Context/ChainContext engine:
 * each context subtable format (glyph / class / coverage) plugs its own
 * intersects / collect / match callback into these small structs. */
764 typedef bool (*intersects_func_t) (const hb_set_t *glyphs, const HBUINT16 &value, const void *data);
765 typedef void (*collect_glyphs_func_t) (hb_set_t *glyphs, const HBUINT16 &value, const void *data);
766 typedef bool (*match_func_t) (hb_codepoint_t glyph_id, const HBUINT16 &value, const void *data);
768 struct ContextClosureFuncs
770 intersects_func_t intersects;
772 struct ContextCollectGlyphsFuncs
774 collect_glyphs_func_t collect;
776 struct ContextApplyFuncs
/* intersects_* callbacks: interpret `value` per subtable format —
 * as a glyph id, a ClassDef class, or an offset to a Coverage table. */
782 static inline bool intersects_glyph (const hb_set_t *glyphs, const HBUINT16 &value, const void *data HB_UNUSED)
784 return glyphs->has (value);
786 static inline bool intersects_class (const hb_set_t *glyphs, const HBUINT16 &value, const void *data)
788 const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
789 return class_def.intersects_class (glyphs, value);
791 static inline bool intersects_coverage (const hb_set_t *glyphs, const HBUINT16 &value, const void *data)
/* `value` is really an OffsetTo<Coverage> relative to `data`. */
793 const OffsetTo<Coverage> &coverage = (const OffsetTo<Coverage>&)value;
794 return (data+coverage).intersects (glyphs);
/* True iff every value in the array intersects `glyphs` (AND-fold). */
797 static inline bool array_is_subset_of (const hb_set_t *glyphs,
799 const HBUINT16 values[],
800 intersects_func_t intersects_func,
801 const void *intersects_data)
803 for (const HBUINT16 &_ : + hb_iter (values, count))
804 if (!intersects_func (glyphs, _, intersects_data)) return false;
/* collect_* callbacks: add every glyph `value` can denote (itself, its
 * class members, or its coverage set) into `glyphs`. */
809 static inline void collect_glyph (hb_set_t *glyphs, const HBUINT16 &value, const void *data HB_UNUSED)
813 static inline void collect_class (hb_set_t *glyphs, const HBUINT16 &value, const void *data)
815 const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
816 class_def.collect_class (glyphs, value);
818 static inline void collect_coverage (hb_set_t *glyphs, const HBUINT16 &value, const void *data)
820 const OffsetTo<Coverage> &coverage = (const OffsetTo<Coverage>&)value;
821 (data+coverage).collect_coverage (glyphs);
/* Apply collect_func to each element of the value array. */
823 static inline void collect_array (hb_collect_glyphs_context_t *c HB_UNUSED,
826 const HBUINT16 values[],
827 collect_glyphs_func_t collect_func,
828 const void *collect_data)
831 + hb_iter (values, count)
832 | hb_apply ([&] (const HBUINT16 &_) { collect_func (glyphs, _, collect_data); })
/* match_* callbacks: test one glyph against `value` per subtable format. */
837 static inline bool match_glyph (hb_codepoint_t glyph_id, const HBUINT16 &value, const void *data HB_UNUSED)
839 return glyph_id == value;
841 static inline bool match_class (hb_codepoint_t glyph_id, const HBUINT16 &value, const void *data)
843 const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
844 return class_def.get_class (glyph_id) == value;
846 static inline bool match_coverage (hb_codepoint_t glyph_id, const HBUINT16 &value, const void *data)
848 const OffsetTo<Coverage> &coverage = (const OffsetTo<Coverage>&)value;
849 return (data+coverage).get_coverage (glyph_id) != NOT_COVERED;
/* would_match_input: check the fixed glyph array in the would-apply context
 * against the input sequence (input[] starts at the SECOND glyph, hence the
 * i-1 indexing).  Length check against c->len presumably happens on a line
 * not visible in this extract — confirm against full header. */
852 static inline bool would_match_input (hb_would_apply_context_t *c,
853 unsigned int count, /* Including the first glyph (not matched) */
854 const HBUINT16 input[], /* Array of input values--start with second glyph */
855 match_func_t match_func,
856 const void *match_data)
861 for (unsigned int i = 1; i < count; i++)
862 if (likely (!match_func (c->glyphs[i], input[i - 1], match_data)))
/* match_input: walk the buffer from buffer->idx trying to match `count`
 * glyphs (first is the already-covered current glyph) using the skipping
 * iterator.  Fills match_positions[], *end_offset, and optionally the total
 * ligature-component count.  Contains the delicate ligature-component
 * compatibility rules (see the long comment below).
 * NOTE(review): partial extract — some enum lines (LIGBASE_*), braces and
 * loop-exit lines are missing; code byte-identical. */
867 static inline bool match_input (hb_ot_apply_context_t *c,
868 unsigned int count, /* Including the first glyph (not matched) */
869 const HBUINT16 input[], /* Array of input values--start with second glyph */
870 match_func_t match_func,
871 const void *match_data,
872 unsigned int *end_offset,
873 unsigned int match_positions[HB_MAX_CONTEXT_LENGTH],
874 unsigned int *p_total_component_count = nullptr)
876 TRACE_APPLY (nullptr);
878 if (unlikely (count > HB_MAX_CONTEXT_LENGTH)) return_trace (false);
880 hb_buffer_t *buffer = c->buffer;
882 hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
/* count-1 because the current glyph is matched implicitly by coverage. */
883 skippy_iter.reset (buffer->idx, count - 1);
884 skippy_iter.set_match_func (match_func, match_data, input);
887 * This is perhaps the trickiest part of OpenType... Remarks:
889 * - If all components of the ligature were marks, we call this a mark ligature.
891 * - If there is no GDEF, and the ligature is NOT a mark ligature, we categorize
892 * it as a ligature glyph.
894 * - Ligatures cannot be formed across glyphs attached to different components
895 * of previous ligatures. Eg. the sequence is LAM,SHADDA,LAM,FATHA,HEH, and
896 * LAM,LAM,HEH form a ligature, leaving SHADDA,FATHA next to eachother.
897 * However, it would be wrong to ligate that SHADDA,FATHA sequence.
898 * There are a couple of exceptions to this:
900 * o If a ligature tries ligating with marks that belong to it itself, go ahead,
901 * assuming that the font designer knows what they are doing (otherwise it can
902 * break Indic stuff when a matra wants to ligate with a conjunct,
904 * o If two marks want to ligate and they belong to different components of the
905 * same ligature glyph, and said ligature glyph is to be ignored according to
906 * mark-filtering rules, then allow.
907 * https://github.com/harfbuzz/harfbuzz/issues/545
910 unsigned int total_component_count = 0;
911 total_component_count += _hb_glyph_info_get_lig_num_comps (&buffer->cur());
913 unsigned int first_lig_id = _hb_glyph_info_get_lig_id (&buffer->cur());
914 unsigned int first_lig_comp = _hb_glyph_info_get_lig_comp (&buffer->cur());
/* Lazily-computed tri-state: whether the base ligature is skippable. */
918 LIGBASE_MAY_NOT_SKIP,
920 } ligbase = LIGBASE_NOT_CHECKED;
922 match_positions[0] = buffer->idx;
923 for (unsigned int i = 1; i < count; i++)
925 if (!skippy_iter.next ()) return_trace (false);
927 match_positions[i] = skippy_iter.idx;
929 unsigned int this_lig_id = _hb_glyph_info_get_lig_id (&buffer->info[skippy_iter.idx]);
930 unsigned int this_lig_comp = _hb_glyph_info_get_lig_comp (&buffer->info[skippy_iter.idx]);
932 if (first_lig_id && first_lig_comp)
934 /* If first component was attached to a previous ligature component,
935 * all subsequent components should be attached to the same ligature
936 * component, otherwise we shouldn't ligate them... */
937 if (first_lig_id != this_lig_id || first_lig_comp != this_lig_comp)
939 /* ...unless, we are attached to a base ligature and that base
940 * ligature is ignorable. */
941 if (ligbase == LIGBASE_NOT_CHECKED)
/* Scan the out-buffer backwards for the base glyph (lig_comp==0)
 * of the ligature the first component belongs to. */
944 const auto *out = buffer->out_info;
945 unsigned int j = buffer->out_len;
946 while (j && _hb_glyph_info_get_lig_id (&out[j - 1]) == first_lig_id)
948 if (_hb_glyph_info_get_lig_comp (&out[j - 1]) == 0)
957 if (found && skippy_iter.may_skip (out[j]) == hb_ot_apply_context_t::matcher_t::SKIP_YES)
958 ligbase = LIGBASE_MAY_SKIP;
960 ligbase = LIGBASE_MAY_NOT_SKIP;
963 if (ligbase == LIGBASE_MAY_NOT_SKIP)
964 return_trace (false);
969 /* If first component was NOT attached to a previous ligature component,
970 * all subsequent components should also NOT be attached to any ligature
971 * component, unless they are attached to the first component itself! */
972 if (this_lig_id && this_lig_comp && (this_lig_id != first_lig_id))
973 return_trace (false);
976 total_component_count += _hb_glyph_info_get_lig_num_comps (&buffer->info[skippy_iter.idx]);
/* Matched length in buffer positions, inclusive of the first glyph. */
979 *end_offset = skippy_iter.idx - buffer->idx + 1;
981 if (p_total_component_count)
982 *p_total_component_count = total_component_count;
/* ligate_input: replace the matched sequence with `lig_glyph`, assigning
 * ligature ids/components so that marks (current and following) stay
 * attached to the right component.  See the long in-body comment for the
 * full rationale and bug links.
 * NOTE(review): partial extract — braces and a few statements (e.g. the
 * skip of the base glyph near line 1078) are missing; code byte-identical. */
986 static inline bool ligate_input (hb_ot_apply_context_t *c,
987 unsigned int count, /* Including the first glyph */
988 const unsigned int match_positions[HB_MAX_CONTEXT_LENGTH], /* Including the first glyph */
989 unsigned int match_length,
990 hb_codepoint_t lig_glyph,
991 unsigned int total_component_count)
993 TRACE_APPLY (nullptr);
995 hb_buffer_t *buffer = c->buffer;
997 buffer->merge_clusters (buffer->idx, buffer->idx + match_length);
999 /* - If a base and one or more marks ligate, consider that as a base, NOT
1000 * ligature, such that all following marks can still attach to it.
1001 * https://github.com/harfbuzz/harfbuzz/issues/1109
1003 * - If all components of the ligature were marks, we call this a mark ligature.
1004 * If it *is* a mark ligature, we don't allocate a new ligature id, and leave
1005 * the ligature to keep its old ligature id. This will allow it to attach to
1006 * a base ligature in GPOS. Eg. if the sequence is: LAM,LAM,SHADDA,FATHA,HEH,
1007 * and LAM,LAM,HEH for a ligature, they will leave SHADDA and FATHA with a
1008 * ligature id and component value of 2. Then if SHADDA,FATHA form a ligature
1009 * later, we don't want them to lose their ligature id/component, otherwise
1010 * GPOS will fail to correctly position the mark ligature on top of the
1011 * LAM,LAM,HEH ligature. See:
1012 * https://bugzilla.gnome.org/show_bug.cgi?id=676343
1014 * - If a ligature is formed of components that some of which are also ligatures
1015 * themselves, and those ligature components had marks attached to *their*
1016 * components, we have to attach the marks to the new ligature component
1017 * positions! Now *that*'s tricky! And these marks may be following the
1018 * last component of the whole sequence, so we should loop forward looking
1019 * for them and update them.
1021 * Eg. the sequence is LAM,LAM,SHADDA,FATHA,HEH, and the font first forms a
1022 * 'calt' ligature of LAM,HEH, leaving the SHADDA and FATHA with a ligature
1023 * id and component == 1. Now, during 'liga', the LAM and the LAM-HEH ligature
1024 * form a LAM-LAM-HEH ligature. We need to reassign the SHADDA and FATHA to
1025 * the new ligature with a component value of 2.
1027 * This in fact happened to a font... See:
1028 * https://bugzilla.gnome.org/show_bug.cgi?id=437633
/* Classify the match: ligature only if neither all-base nor all-mark. */
1031 bool is_base_ligature = _hb_glyph_info_is_base_glyph (&buffer->info[match_positions[0]]);
1032 bool is_mark_ligature = _hb_glyph_info_is_mark (&buffer->info[match_positions[0]]);
1033 for (unsigned int i = 1; i < count; i++)
1034 if (!_hb_glyph_info_is_mark (&buffer->info[match_positions[i]]))
1036 is_base_ligature = false;
1037 is_mark_ligature = false;
1040 bool is_ligature = !is_base_ligature && !is_mark_ligature;
1042 unsigned int klass = is_ligature ? HB_OT_LAYOUT_GLYPH_PROPS_LIGATURE : 0;
1043 unsigned int lig_id = is_ligature ? _hb_allocate_lig_id (buffer) : 0;
1044 unsigned int last_lig_id = _hb_glyph_info_get_lig_id (&buffer->cur());
1045 unsigned int last_num_components = _hb_glyph_info_get_lig_num_comps (&buffer->cur());
1046 unsigned int components_so_far = last_num_components;
1050 _hb_glyph_info_set_lig_props_for_ligature (&buffer->cur(), lig_id, total_component_count);
/* A non-spacing-mark ligature result acts as a letter from here on. */
1051 if (_hb_glyph_info_get_general_category (&buffer->cur()) == HB_UNICODE_GENERAL_CATEGORY_NON_SPACING_MARK)
1053 _hb_glyph_info_set_general_category (&buffer->cur(), HB_UNICODE_GENERAL_CATEGORY_OTHER_LETTER);
1056 c->replace_glyph_with_ligature (lig_glyph, klass);
/* Walk the skipped-over glyphs between components, remapping any marks
 * onto the new ligature's component numbering. */
1058 for (unsigned int i = 1; i < count; i++)
1060 while (buffer->idx < match_positions[i] && buffer->successful)
1064 unsigned int this_comp = _hb_glyph_info_get_lig_comp (&buffer->cur());
1066 this_comp = last_num_components;
1067 unsigned int new_lig_comp = components_so_far - last_num_components +
1068 hb_min (this_comp, last_num_components);
1069 _hb_glyph_info_set_lig_props_for_mark (&buffer->cur(), lig_id, new_lig_comp);
1071 buffer->next_glyph ();
1074 last_lig_id = _hb_glyph_info_get_lig_id (&buffer->cur());
1075 last_num_components = _hb_glyph_info_get_lig_num_comps (&buffer->cur());
1076 components_so_far += last_num_components;
1078 /* Skip the base glyph */
1082 if (!is_mark_ligature && last_lig_id)
1084 /* Re-adjust components for any marks following. */
1085 for (unsigned i = buffer->idx; i < buffer->len; ++i)
1087 if (last_lig_id != _hb_glyph_info_get_lig_id (&buffer->info[i])) break;
1089 unsigned this_comp = _hb_glyph_info_get_lig_comp (&buffer->info[i]);
1090 if (!this_comp) break;
1092 unsigned new_lig_comp = components_so_far - last_num_components +
1093 hb_min (this_comp, last_num_components);
1094 _hb_glyph_info_set_lig_props_for_mark (&buffer->info[i], lig_id, new_lig_comp);
1097 return_trace (true);
/* match_backtrack: match `count` values walking BACKWARD through the
 * out-buffer (already-processed glyphs) using the context iterator.
 * On success, *match_start is the index of the furthest-back match. */
1100 static inline bool match_backtrack (hb_ot_apply_context_t *c,
1102 const HBUINT16 backtrack[],
1103 match_func_t match_func,
1104 const void *match_data,
1105 unsigned int *match_start)
1107 TRACE_APPLY (nullptr);
1109 hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_context;
/* Start just past the out-buffer end; prev() steps into it. */
1110 skippy_iter.reset (c->buffer->backtrack_len (), count);
1111 skippy_iter.set_match_func (match_func, match_data, backtrack);
1113 for (unsigned int i = 0; i < count; i++)
1114 if (!skippy_iter.prev ())
1115 return_trace (false);
1117 *match_start = skippy_iter.idx;
1119 return_trace (true);
1122 static inline bool match_lookahead (hb_ot_apply_context_t *c,
1124 const HBUINT16 lookahead[],
1125 match_func_t match_func,
1126 const void *match_data,
1127 unsigned int offset,
1128 unsigned int *end_index)
1130 TRACE_APPLY (nullptr);
1132 hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_context;
1133 skippy_iter.reset (c->buffer->idx + offset - 1, count);
1134 skippy_iter.set_match_func (match_func, match_data, lookahead);
1136 for (unsigned int i = 0; i < count; i++)
1137 if (!skippy_iter.next ())
1138 return_trace (false);
1140 *end_index = skippy_iter.idx + 1;
1142 return_trace (true);
1149 LookupRecord* copy (hb_serialize_context_t *c,
1150 const hb_map_t *lookup_map) const
1152 TRACE_SERIALIZE (this);
1153 auto *out = c->embed (*this);
1154 if (unlikely (!out)) return_trace (nullptr);
1156 out->lookupListIndex = hb_map_get (lookup_map, lookupListIndex);
1160 bool sanitize (hb_sanitize_context_t *c) const
1162 TRACE_SANITIZE (this);
1163 return_trace (c->check_struct (this));
/* Data members: a LookupRecord pairs a position in the matched input
 * sequence with the index of the lookup to apply at that position. */
1166 HBUINT16 sequenceIndex; /* Index into current glyph
1167 * sequence--first glyph = 0 */
1168 HBUINT16 lookupListIndex; /* Lookup to apply to that
1169 * position--zero--based */
1171 DEFINE_SIZE_STATIC (4);
1174 template <typename context_t>
1175 static inline void recurse_lookups (context_t *c,
1176 unsigned int lookupCount,
1177 const LookupRecord lookupRecord[] /* Array of LookupRecords--in design order */)
1179 for (unsigned int i = 0; i < lookupCount; i++)
1180 c->recurse (lookupRecord[i].lookupListIndex);
1183 static inline bool apply_lookup (hb_ot_apply_context_t *c,
1184 unsigned int count, /* Including the first glyph */
1185 unsigned int match_positions[HB_MAX_CONTEXT_LENGTH], /* Including the first glyph */
1186 unsigned int lookupCount,
1187 const LookupRecord lookupRecord[], /* Array of LookupRecords--in design order */
1188 unsigned int match_length)
1190 TRACE_APPLY (nullptr);
1192 hb_buffer_t *buffer = c->buffer;
1195 /* All positions are distance from beginning of *output* buffer.
1198 unsigned int bl = buffer->backtrack_len ();
1199 end = bl + match_length;
1201 int delta = bl - buffer->idx;
1202 /* Convert positions to new indexing. */
1203 for (unsigned int j = 0; j < count; j++)
1204 match_positions[j] += delta;
1207 for (unsigned int i = 0; i < lookupCount && buffer->successful; i++)
1209 unsigned int idx = lookupRecord[i].sequenceIndex;
1213 /* Don't recurse to ourself at same position.
1214 * Note that this test is too naive, it doesn't catch longer loops. */
1215 if (idx == 0 && lookupRecord[i].lookupListIndex == c->lookup_index)
1218 if (unlikely (!buffer->move_to (match_positions[idx])))
1221 if (unlikely (buffer->max_ops <= 0))
1224 unsigned int orig_len = buffer->backtrack_len () + buffer->lookahead_len ();
1225 if (!c->recurse (lookupRecord[i].lookupListIndex))
1228 unsigned int new_len = buffer->backtrack_len () + buffer->lookahead_len ();
1229 int delta = new_len - orig_len;
1234 /* Recursed lookup changed buffer len. Adjust.
1238 * Right now, if buffer length increased by n, we assume n new glyphs
1239 * were added right after the current position, and if buffer length
1240 * was decreased by n, we assume n match positions after the current
1241 * one where removed. The former (buffer length increased) case is
1242 * fine, but the decrease case can be improved in at least two ways,
1243 * both of which are significant:
1245 * - If recursed-to lookup is MultipleSubst and buffer length
1246 * decreased, then it's current match position that was deleted,
1247 * NOT the one after it.
1249 * - If buffer length was decreased by n, it does not necessarily
1250 * mean that n match positions where removed, as there might
1251 * have been marks and default-ignorables in the sequence. We
1252 * should instead drop match positions between current-position
1253 * and current-position + n instead.
1255 * It should be possible to construct tests for both of these cases.
1259 if (end <= int (match_positions[idx]))
1261 /* End might end up being smaller than match_positions[idx] if the recursed
1262 * lookup ended up removing many items, more than we have had matched.
1263 * Just never rewind end back and get out of here.
1264 * https://bugs.chromium.org/p/chromium/issues/detail?id=659496 */
1265 end = match_positions[idx];
1266 /* There can't be any further changes. */
1270 unsigned int next = idx + 1; /* next now is the position after the recursed lookup. */
1274 if (unlikely (delta + count > HB_MAX_CONTEXT_LENGTH))
1279 /* NOTE: delta is negative. */
1280 delta = hb_max (delta, (int) next - (int) count);
1285 memmove (match_positions + next + delta, match_positions + next,
1286 (count - next) * sizeof (match_positions[0]));
1290 /* Fill in new entries. */
1291 for (unsigned int j = idx + 1; j < next; j++)
1292 match_positions[j] = match_positions[j - 1] + 1;
1294 /* And fixup the rest. */
1295 for (; next < count; next++)
1296 match_positions[next] += delta;
1299 buffer->move_to (end);
1301 return_trace (true);
1306 /* Contextual lookups */
1308 struct ContextClosureLookupContext
1310 ContextClosureFuncs funcs;
1311 const void *intersects_data;
1314 struct ContextCollectGlyphsLookupContext
1316 ContextCollectGlyphsFuncs funcs;
1317 const void *collect_data;
1320 struct ContextApplyLookupContext
1322 ContextApplyFuncs funcs;
1323 const void *match_data;
1326 static inline bool context_intersects (const hb_set_t *glyphs,
1327 unsigned int inputCount, /* Including the first glyph (not matched) */
1328 const HBUINT16 input[], /* Array of input values--start with second glyph */
1329 ContextClosureLookupContext &lookup_context)
1331 return array_is_subset_of (glyphs,
1332 inputCount ? inputCount - 1 : 0, input,
1333 lookup_context.funcs.intersects, lookup_context.intersects_data);
1336 static inline void context_closure_lookup (hb_closure_context_t *c,
1337 unsigned int inputCount, /* Including the first glyph (not matched) */
1338 const HBUINT16 input[], /* Array of input values--start with second glyph */
1339 unsigned int lookupCount,
1340 const LookupRecord lookupRecord[],
1341 ContextClosureLookupContext &lookup_context)
1343 if (context_intersects (c->glyphs,
1347 lookupCount, lookupRecord);
1350 static inline void context_collect_glyphs_lookup (hb_collect_glyphs_context_t *c,
1351 unsigned int inputCount, /* Including the first glyph (not matched) */
1352 const HBUINT16 input[], /* Array of input values--start with second glyph */
1353 unsigned int lookupCount,
1354 const LookupRecord lookupRecord[],
1355 ContextCollectGlyphsLookupContext &lookup_context)
1357 collect_array (c, c->input,
1358 inputCount ? inputCount - 1 : 0, input,
1359 lookup_context.funcs.collect, lookup_context.collect_data);
1361 lookupCount, lookupRecord);
1364 static inline bool context_would_apply_lookup (hb_would_apply_context_t *c,
1365 unsigned int inputCount, /* Including the first glyph (not matched) */
1366 const HBUINT16 input[], /* Array of input values--start with second glyph */
1367 unsigned int lookupCount HB_UNUSED,
1368 const LookupRecord lookupRecord[] HB_UNUSED,
1369 ContextApplyLookupContext &lookup_context)
1371 return would_match_input (c,
1373 lookup_context.funcs.match, lookup_context.match_data);
1375 static inline bool context_apply_lookup (hb_ot_apply_context_t *c,
1376 unsigned int inputCount, /* Including the first glyph (not matched) */
1377 const HBUINT16 input[], /* Array of input values--start with second glyph */
1378 unsigned int lookupCount,
1379 const LookupRecord lookupRecord[],
1380 ContextApplyLookupContext &lookup_context)
1382 unsigned int match_length = 0;
1383 unsigned int match_positions[HB_MAX_CONTEXT_LENGTH];
1384 return match_input (c,
1386 lookup_context.funcs.match, lookup_context.match_data,
1387 &match_length, match_positions)
1388 && (c->buffer->unsafe_to_break (c->buffer->idx, c->buffer->idx + match_length),
1390 inputCount, match_positions,
1391 lookupCount, lookupRecord,
1397 bool intersects (const hb_set_t *glyphs, ContextClosureLookupContext &lookup_context) const
1399 return context_intersects (glyphs,
1400 inputCount, inputZ.arrayZ,
1404 void closure (hb_closure_context_t *c, ContextClosureLookupContext &lookup_context) const
1406 if (unlikely (c->lookup_limit_exceeded ())) return;
1408 const UnsizedArrayOf<LookupRecord> &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
1409 (inputZ.as_array ((inputCount ? inputCount - 1 : 0)));
1410 context_closure_lookup (c,
1411 inputCount, inputZ.arrayZ,
1412 lookupCount, lookupRecord.arrayZ,
1416 void closure_lookups (hb_closure_lookups_context_t *c) const
1418 if (unlikely (c->lookup_limit_exceeded ())) return;
1420 const UnsizedArrayOf<LookupRecord> &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
1421 (inputZ.as_array (inputCount ? inputCount - 1 : 0));
1422 recurse_lookups (c, lookupCount, lookupRecord.arrayZ);
1425 void collect_glyphs (hb_collect_glyphs_context_t *c,
1426 ContextCollectGlyphsLookupContext &lookup_context) const
1428 const UnsizedArrayOf<LookupRecord> &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
1429 (inputZ.as_array (inputCount ? inputCount - 1 : 0));
1430 context_collect_glyphs_lookup (c,
1431 inputCount, inputZ.arrayZ,
1432 lookupCount, lookupRecord.arrayZ,
1436 bool would_apply (hb_would_apply_context_t *c,
1437 ContextApplyLookupContext &lookup_context) const
1439 const UnsizedArrayOf<LookupRecord> &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
1440 (inputZ.as_array (inputCount ? inputCount - 1 : 0));
1441 return context_would_apply_lookup (c,
1442 inputCount, inputZ.arrayZ,
1443 lookupCount, lookupRecord.arrayZ,
1447 bool apply (hb_ot_apply_context_t *c,
1448 ContextApplyLookupContext &lookup_context) const
1451 const UnsizedArrayOf<LookupRecord> &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
1452 (inputZ.as_array (inputCount ? inputCount - 1 : 0));
1453 return_trace (context_apply_lookup (c, inputCount, inputZ.arrayZ, lookupCount, lookupRecord.arrayZ, lookup_context));
1456 bool serialize (hb_serialize_context_t *c,
1457 const hb_map_t *input_mapping, /* old->new glyphid or class mapping */
1458 const hb_map_t *lookup_map) const
1460 TRACE_SERIALIZE (this);
1461 auto *out = c->start_embed (this);
1462 if (unlikely (!c->extend_min (out))) return_trace (false);
1464 out->inputCount = inputCount;
1465 out->lookupCount = lookupCount;
1467 const hb_array_t<const HBUINT16> input = inputZ.as_array (inputCount - 1);
1468 for (const auto org : input)
1471 d = input_mapping->get (org);
1475 const UnsizedArrayOf<LookupRecord> &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
1476 (inputZ.as_array ((inputCount ? inputCount - 1 : 0)));
1477 for (unsigned i = 0; i < (unsigned) lookupCount; i++)
1478 c->copy (lookupRecord[i], lookup_map);
1480 return_trace (true);
1483 bool subset (hb_subset_context_t *c,
1484 const hb_map_t *lookup_map,
1485 const hb_map_t *klass_map = nullptr) const
1487 TRACE_SUBSET (this);
1489 const hb_array_t<const HBUINT16> input = inputZ.as_array ((inputCount ? inputCount - 1 : 0));
1490 if (!input.length) return_trace (false);
1492 const hb_map_t *mapping = klass_map == nullptr ? c->plan->glyph_map : klass_map;
1493 if (!hb_all (input, mapping)) return_trace (false);
1494 return_trace (serialize (c->serializer, mapping, lookup_map));
1498 bool sanitize (hb_sanitize_context_t *c) const
1500 TRACE_SANITIZE (this);
1501 return_trace (inputCount.sanitize (c) &&
1502 lookupCount.sanitize (c) &&
1503 c->check_range (inputZ.arrayZ,
1504 inputZ.item_size * (inputCount ? inputCount - 1 : 0) +
1505 LookupRecord::static_size * lookupCount));
/* Data layout of a contextual Rule: header (inputCount, lookupCount)
 * followed by the unsized input array; the LookupRecord array follows the
 * input array in memory (accessed via StructAfter, see methods above). */
1509 HBUINT16 inputCount; /* Total number of glyphs in input
1510 * glyph sequence--includes the first
1512 HBUINT16 lookupCount; /* Number of LookupRecords */
1513 UnsizedArrayOf<HBUINT16>
1514 inputZ; /* Array of match inputs--start with
1516 /*UnsizedArrayOf<LookupRecord>
1517 lookupRecordX;*/ /* Array of LookupRecords--in
1520 DEFINE_SIZE_ARRAY (4, inputZ);
1525 bool intersects (const hb_set_t *glyphs,
1526 ContextClosureLookupContext &lookup_context) const
1530 | hb_map (hb_add (this))
1531 | hb_map ([&] (const Rule &_) { return _.intersects (glyphs, lookup_context); })
1536 void closure (hb_closure_context_t *c,
1537 ContextClosureLookupContext &lookup_context) const
1539 if (unlikely (c->lookup_limit_exceeded ())) return;
1543 | hb_map (hb_add (this))
1544 | hb_apply ([&] (const Rule &_) { _.closure (c, lookup_context); })
1548 void closure_lookups (hb_closure_lookups_context_t *c) const
1550 if (unlikely (c->lookup_limit_exceeded ())) return;
1554 | hb_map (hb_add (this))
1555 | hb_apply ([&] (const Rule &_) { _.closure_lookups (c); })
1559 void collect_glyphs (hb_collect_glyphs_context_t *c,
1560 ContextCollectGlyphsLookupContext &lookup_context) const
1564 | hb_map (hb_add (this))
1565 | hb_apply ([&] (const Rule &_) { _.collect_glyphs (c, lookup_context); })
1569 bool would_apply (hb_would_apply_context_t *c,
1570 ContextApplyLookupContext &lookup_context) const
1574 | hb_map (hb_add (this))
1575 | hb_map ([&] (const Rule &_) { return _.would_apply (c, lookup_context); })
1580 bool apply (hb_ot_apply_context_t *c,
1581 ContextApplyLookupContext &lookup_context) const
1586 | hb_map (hb_add (this))
1587 | hb_map ([&] (const Rule &_) { return _.apply (c, lookup_context); })
1593 bool subset (hb_subset_context_t *c,
1594 const hb_map_t *lookup_map,
1595 const hb_map_t *klass_map = nullptr) const
1597 TRACE_SUBSET (this);
1599 auto snap = c->serializer->snapshot ();
1600 auto *out = c->serializer->start_embed (*this);
1601 if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
1603 for (const OffsetTo<Rule>& _ : rule)
1606 auto *o = out->rule.serialize_append (c->serializer);
1607 if (unlikely (!o)) continue;
1609 auto o_snap = c->serializer->snapshot ();
1610 if (!o->serialize_subset (c, _, this, lookup_map, klass_map))
1613 c->serializer->revert (o_snap);
1617 bool ret = bool (out->rule);
1618 if (!ret) c->serializer->revert (snap);
1623 bool sanitize (hb_sanitize_context_t *c) const
1625 TRACE_SANITIZE (this);
1626 return_trace (rule.sanitize (c, this));
/* Data member: offset array of Rule tables, tried in preference order. */
1631 rule; /* Array of Rule tables
1632 * ordered by preference */
1634 DEFINE_SIZE_ARRAY (2, rule);
1638 struct ContextFormat1
1640 bool intersects (const hb_set_t *glyphs) const
1642 struct ContextClosureLookupContext lookup_context = {
1648 + hb_zip (this+coverage, ruleSet)
1649 | hb_filter (*glyphs, hb_first)
1650 | hb_map (hb_second)
1651 | hb_map (hb_add (this))
1652 | hb_map ([&] (const RuleSet &_) { return _.intersects (glyphs, lookup_context); })
1657 void closure (hb_closure_context_t *c) const
1659 struct ContextClosureLookupContext lookup_context = {
1664 + hb_zip (this+coverage, ruleSet)
1665 | hb_filter (*c->glyphs, hb_first)
1666 | hb_map (hb_second)
1667 | hb_map (hb_add (this))
1668 | hb_apply ([&] (const RuleSet &_) { _.closure (c, lookup_context); })
1672 void closure_lookups (hb_closure_lookups_context_t *c) const
1675 | hb_map (hb_add (this))
1676 | hb_apply ([&] (const RuleSet &_) { _.closure_lookups (c); })
1680 void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {}
1682 void collect_glyphs (hb_collect_glyphs_context_t *c) const
1684 (this+coverage).collect_coverage (c->input);
1686 struct ContextCollectGlyphsLookupContext lookup_context = {
1692 | hb_map (hb_add (this))
1693 | hb_apply ([&] (const RuleSet &_) { _.collect_glyphs (c, lookup_context); })
1697 bool would_apply (hb_would_apply_context_t *c) const
1699 const RuleSet &rule_set = this+ruleSet[(this+coverage).get_coverage (c->glyphs[0])];
1700 struct ContextApplyLookupContext lookup_context = {
1704 return rule_set.would_apply (c, lookup_context);
1707 const Coverage &get_coverage () const { return this+coverage; }
1709 bool apply (hb_ot_apply_context_t *c) const
1712 unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
1713 if (likely (index == NOT_COVERED))
1714 return_trace (false);
1716 const RuleSet &rule_set = this+ruleSet[index];
1717 struct ContextApplyLookupContext lookup_context = {
1721 return_trace (rule_set.apply (c, lookup_context));
1724 bool subset (hb_subset_context_t *c) const
1726 TRACE_SUBSET (this);
1727 const hb_set_t &glyphset = *c->plan->glyphset ();
1728 const hb_map_t &glyph_map = *c->plan->glyph_map;
1730 auto *out = c->serializer->start_embed (*this);
1731 if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
1732 out->format = format;
1734 const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? c->plan->gsub_lookups : c->plan->gpos_lookups;
1735 hb_sorted_vector_t<hb_codepoint_t> new_coverage;
1736 + hb_zip (this+coverage, ruleSet)
1737 | hb_filter (glyphset, hb_first)
1738 | hb_filter (subset_offset_array (c, out->ruleSet, this, lookup_map), hb_second)
1740 | hb_map (glyph_map)
1741 | hb_sink (new_coverage)
1744 out->coverage.serialize (c->serializer, out)
1745 .serialize (c->serializer, new_coverage.iter ());
1746 return_trace (bool (new_coverage));
1749 bool sanitize (hb_sanitize_context_t *c) const
1751 TRACE_SANITIZE (this);
1752 return_trace (coverage.sanitize (c, this) && ruleSet.sanitize (c, this));
1756 HBUINT16 format; /* Format identifier--format = 1 */
1758 coverage; /* Offset to Coverage table--from
1759 * beginning of table */
1760 OffsetArrayOf<RuleSet>
1761 ruleSet; /* Array of RuleSet tables
1762 * ordered by Coverage Index */
1764 DEFINE_SIZE_ARRAY (6, ruleSet);
1768 struct ContextFormat2
1770 bool intersects (const hb_set_t *glyphs) const
1772 if (!(this+coverage).intersects (glyphs))
1775 const ClassDef &class_def = this+classDef;
1777 struct ContextClosureLookupContext lookup_context = {
1783 + hb_enumerate (ruleSet)
1784 | hb_map ([&] (const hb_pair_t<unsigned, const OffsetTo<RuleSet> &> p)
1785 { return class_def.intersects_class (glyphs, p.first) &&
1786 (this+p.second).intersects (glyphs, lookup_context); })
1791 void closure (hb_closure_context_t *c) const
1793 if (!(this+coverage).intersects (c->glyphs))
1796 const ClassDef &class_def = this+classDef;
1798 struct ContextClosureLookupContext lookup_context = {
1804 + hb_enumerate (ruleSet)
1805 | hb_filter ([&] (unsigned _)
1806 { return class_def.intersects_class (c->glyphs, _); },
1808 | hb_map (hb_second)
1809 | hb_map (hb_add (this))
1810 | hb_apply ([&] (const RuleSet &_) { _.closure (c, lookup_context); })
1814 void closure_lookups (hb_closure_lookups_context_t *c) const
1817 | hb_map (hb_add (this))
1818 | hb_apply ([&] (const RuleSet &_) { _.closure_lookups (c); })
1822 void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {}
1824 void collect_glyphs (hb_collect_glyphs_context_t *c) const
1826 (this+coverage).collect_coverage (c->input);
1828 const ClassDef &class_def = this+classDef;
1829 struct ContextCollectGlyphsLookupContext lookup_context = {
1835 | hb_map (hb_add (this))
1836 | hb_apply ([&] (const RuleSet &_) { _.collect_glyphs (c, lookup_context); })
1840 bool would_apply (hb_would_apply_context_t *c) const
1842 const ClassDef &class_def = this+classDef;
1843 unsigned int index = class_def.get_class (c->glyphs[0]);
1844 const RuleSet &rule_set = this+ruleSet[index];
1845 struct ContextApplyLookupContext lookup_context = {
1849 return rule_set.would_apply (c, lookup_context);
1852 const Coverage &get_coverage () const { return this+coverage; }
1854 bool apply (hb_ot_apply_context_t *c) const
1857 unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
1858 if (likely (index == NOT_COVERED)) return_trace (false);
1860 const ClassDef &class_def = this+classDef;
1861 index = class_def.get_class (c->buffer->cur().codepoint);
1862 const RuleSet &rule_set = this+ruleSet[index];
1863 struct ContextApplyLookupContext lookup_context = {
1867 return_trace (rule_set.apply (c, lookup_context));
1870 bool subset (hb_subset_context_t *c) const
1872 TRACE_SUBSET (this);
1873 auto *out = c->serializer->start_embed (*this);
1874 if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
1875 out->format = format;
1876 if (unlikely (!out->coverage.serialize_subset (c, coverage, this)))
1877 return_trace (false);
1880 out->classDef.serialize_subset (c, classDef, this, &klass_map);
1882 const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? c->plan->gsub_lookups : c->plan->gpos_lookups;
1884 unsigned non_zero_index = 0, index = 0;
1885 for (const hb_pair_t<unsigned, const OffsetTo<RuleSet>&> _ : + hb_enumerate (ruleSet)
1886 | hb_filter (klass_map, hb_first))
1888 auto *o = out->ruleSet.serialize_append (c->serializer);
1895 if (o->serialize_subset (c, _.second, this, lookup_map, &klass_map))
1896 non_zero_index = index;
1901 if (!ret) return_trace (ret);
1903 //prune empty trailing ruleSets
1905 while (index > non_zero_index)
1907 out->ruleSet.pop ();
1911 return_trace (bool (out->ruleSet));
1914 bool sanitize (hb_sanitize_context_t *c) const
1916 TRACE_SANITIZE (this);
1917 return_trace (coverage.sanitize (c, this) && classDef.sanitize (c, this) && ruleSet.sanitize (c, this));
1921 HBUINT16 format; /* Format identifier--format = 2 */
1923 coverage; /* Offset to Coverage table--from
1924 * beginning of table */
1926 classDef; /* Offset to glyph ClassDef table--from
1927 * beginning of table */
1928 OffsetArrayOf<RuleSet>
1929 ruleSet; /* Array of RuleSet tables
1930 * ordered by class */
1932 DEFINE_SIZE_ARRAY (8, ruleSet);
1936 struct ContextFormat3
1938 bool intersects (const hb_set_t *glyphs) const
1940 if (!(this+coverageZ[0]).intersects (glyphs))
1943 struct ContextClosureLookupContext lookup_context = {
1944 {intersects_coverage},
1947 return context_intersects (glyphs,
1948 glyphCount, (const HBUINT16 *) (coverageZ.arrayZ + 1),
1952 void closure (hb_closure_context_t *c) const
1954 if (!(this+coverageZ[0]).intersects (c->glyphs))
1957 const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
1958 struct ContextClosureLookupContext lookup_context = {
1959 {intersects_coverage},
1962 context_closure_lookup (c,
1963 glyphCount, (const HBUINT16 *) (coverageZ.arrayZ + 1),
1964 lookupCount, lookupRecord,
1968 void closure_lookups (hb_closure_lookups_context_t *c) const
1970 const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
1971 recurse_lookups (c, lookupCount, lookupRecord);
1974 void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {}
1976 void collect_glyphs (hb_collect_glyphs_context_t *c) const
1978 (this+coverageZ[0]).collect_coverage (c->input);
1980 const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
1981 struct ContextCollectGlyphsLookupContext lookup_context = {
1986 context_collect_glyphs_lookup (c,
1987 glyphCount, (const HBUINT16 *) (coverageZ.arrayZ + 1),
1988 lookupCount, lookupRecord,
1992 bool would_apply (hb_would_apply_context_t *c) const
1994 const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
1995 struct ContextApplyLookupContext lookup_context = {
1999 return context_would_apply_lookup (c,
2000 glyphCount, (const HBUINT16 *) (coverageZ.arrayZ + 1),
2001 lookupCount, lookupRecord,
2005 const Coverage &get_coverage () const { return this+coverageZ[0]; }
2007 bool apply (hb_ot_apply_context_t *c) const
2010 unsigned int index = (this+coverageZ[0]).get_coverage (c->buffer->cur().codepoint);
2011 if (likely (index == NOT_COVERED)) return_trace (false);
2013 const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
2014 struct ContextApplyLookupContext lookup_context = {
2018 return_trace (context_apply_lookup (c, glyphCount, (const HBUINT16 *) (coverageZ.arrayZ + 1), lookupCount, lookupRecord, lookup_context));
2021 bool subset (hb_subset_context_t *c) const
2023 TRACE_SUBSET (this);
2024 auto *out = c->serializer->start_embed (this);
2025 if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
2027 out->format = format;
2028 out->glyphCount = glyphCount;
2029 out->lookupCount = lookupCount;
2031 const hb_array_t<const OffsetTo<Coverage>> coverages = coverageZ.as_array (glyphCount);
2033 for (const OffsetTo<Coverage>& offset : coverages)
2035 auto *o = c->serializer->allocate_size<OffsetTo<Coverage>> (OffsetTo<Coverage>::static_size);
2036 if (unlikely (!o)) return_trace (false);
2037 if (!o->serialize_subset (c, offset, this)) return_trace (false);
2040 const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
2041 const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? c->plan->gsub_lookups : c->plan->gpos_lookups;
2042 for (unsigned i = 0; i < (unsigned) lookupCount; i++)
2043 c->serializer->copy (lookupRecord[i], lookup_map);
2045 return_trace (true);
2048 bool sanitize (hb_sanitize_context_t *c) const
2050 TRACE_SANITIZE (this);
2051 if (!c->check_struct (this)) return_trace (false);
2052 unsigned int count = glyphCount;
2053 if (!count) return_trace (false); /* We want to access coverageZ[0] freely. */
2054 if (!c->check_array (coverageZ.arrayZ, count)) return_trace (false);
2055 for (unsigned int i = 0; i < count; i++)
2056 if (!coverageZ[i].sanitize (c, this)) return_trace (false);
2057 const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
2058 return_trace (c->check_array (lookupRecord, lookupCount));
2062 HBUINT16 format; /* Format identifier--format = 3 */
2063 HBUINT16 glyphCount; /* Number of glyphs in the input glyph
2065 HBUINT16 lookupCount; /* Number of LookupRecords */
2066 UnsizedArrayOf<OffsetTo<Coverage>>
2067 coverageZ; /* Array of offsets to Coverage
2068 * table in glyph sequence order */
2069 /*UnsizedArrayOf<LookupRecord>
2070 lookupRecordX;*/ /* Array of LookupRecords--in
2073 DEFINE_SIZE_ARRAY (6, coverageZ);
2078 template <typename context_t, typename ...Ts>
2079 typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
2081 TRACE_DISPATCH (this, u.format);
2082 if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
2084 case 1: return_trace (c->dispatch (u.format1, hb_forward<Ts> (ds)...));
2085 case 2: return_trace (c->dispatch (u.format2, hb_forward<Ts> (ds)...));
2086 case 3: return_trace (c->dispatch (u.format3, hb_forward<Ts> (ds)...));
2087 default:return_trace (c->default_return_value ());
/* Union of the three Context subtable formats; `format` discriminates. */
2093 HBUINT16 format; /* Format identifier */
2094 ContextFormat1 format1;
2095 ContextFormat2 format2;
2096 ContextFormat3 format3;
2101 /* Chaining Contextual lookups */
2103 struct ChainContextClosureLookupContext
2105 ContextClosureFuncs funcs;
2106 const void *intersects_data[3];
2109 struct ChainContextCollectGlyphsLookupContext
2111 ContextCollectGlyphsFuncs funcs;
2112 const void *collect_data[3];
2115 struct ChainContextApplyLookupContext
2117 ContextApplyFuncs funcs;
2118 const void *match_data[3];
2121 static inline bool chain_context_intersects (const hb_set_t *glyphs,
2122 unsigned int backtrackCount,
2123 const HBUINT16 backtrack[],
2124 unsigned int inputCount, /* Including the first glyph (not matched) */
2125 const HBUINT16 input[], /* Array of input values--start with second glyph */
2126 unsigned int lookaheadCount,
2127 const HBUINT16 lookahead[],
2128 ChainContextClosureLookupContext &lookup_context)
2130 return array_is_subset_of (glyphs,
2131 backtrackCount, backtrack,
2132 lookup_context.funcs.intersects, lookup_context.intersects_data[0])
2133 && array_is_subset_of (glyphs,
2134 inputCount ? inputCount - 1 : 0, input,
2135 lookup_context.funcs.intersects, lookup_context.intersects_data[1])
2136 && array_is_subset_of (glyphs,
2137 lookaheadCount, lookahead,
2138 lookup_context.funcs.intersects, lookup_context.intersects_data[2]);
2141 static inline void chain_context_closure_lookup (hb_closure_context_t *c,
2142 unsigned int backtrackCount,
2143 const HBUINT16 backtrack[],
2144 unsigned int inputCount, /* Including the first glyph (not matched) */
2145 const HBUINT16 input[], /* Array of input values--start with second glyph */
2146 unsigned int lookaheadCount,
2147 const HBUINT16 lookahead[],
2148 unsigned int lookupCount,
2149 const LookupRecord lookupRecord[],
2150 ChainContextClosureLookupContext &lookup_context)
2152 if (chain_context_intersects (c->glyphs,
2153 backtrackCount, backtrack,
2155 lookaheadCount, lookahead,
2158 lookupCount, lookupRecord);
2161 static inline void chain_context_collect_glyphs_lookup (hb_collect_glyphs_context_t *c,
2162 unsigned int backtrackCount,
2163 const HBUINT16 backtrack[],
2164 unsigned int inputCount, /* Including the first glyph (not matched) */
2165 const HBUINT16 input[], /* Array of input values--start with second glyph */
2166 unsigned int lookaheadCount,
2167 const HBUINT16 lookahead[],
2168 unsigned int lookupCount,
2169 const LookupRecord lookupRecord[],
2170 ChainContextCollectGlyphsLookupContext &lookup_context)
2172 collect_array (c, c->before,
2173 backtrackCount, backtrack,
2174 lookup_context.funcs.collect, lookup_context.collect_data[0]);
2175 collect_array (c, c->input,
2176 inputCount ? inputCount - 1 : 0, input,
2177 lookup_context.funcs.collect, lookup_context.collect_data[1]);
2178 collect_array (c, c->after,
2179 lookaheadCount, lookahead,
2180 lookup_context.funcs.collect, lookup_context.collect_data[2]);
2182 lookupCount, lookupRecord);
2185 static inline bool chain_context_would_apply_lookup (hb_would_apply_context_t *c,
2186 unsigned int backtrackCount,
2187 const HBUINT16 backtrack[] HB_UNUSED,
2188 unsigned int inputCount, /* Including the first glyph (not matched) */
2189 const HBUINT16 input[], /* Array of input values--start with second glyph */
2190 unsigned int lookaheadCount,
2191 const HBUINT16 lookahead[] HB_UNUSED,
2192 unsigned int lookupCount HB_UNUSED,
2193 const LookupRecord lookupRecord[] HB_UNUSED,
2194 ChainContextApplyLookupContext &lookup_context)
2196 return (c->zero_context ? !backtrackCount && !lookaheadCount : true)
2197 && would_match_input (c,
2199 lookup_context.funcs.match, lookup_context.match_data[1]);
2202 static inline bool chain_context_apply_lookup (hb_ot_apply_context_t *c,
2203 unsigned int backtrackCount,
2204 const HBUINT16 backtrack[],
2205 unsigned int inputCount, /* Including the first glyph (not matched) */
2206 const HBUINT16 input[], /* Array of input values--start with second glyph */
2207 unsigned int lookaheadCount,
2208 const HBUINT16 lookahead[],
2209 unsigned int lookupCount,
2210 const LookupRecord lookupRecord[],
2211 ChainContextApplyLookupContext &lookup_context)
2213 unsigned int start_index = 0, match_length = 0, end_index = 0;
2214 unsigned int match_positions[HB_MAX_CONTEXT_LENGTH];
2215 return match_input (c,
2217 lookup_context.funcs.match, lookup_context.match_data[1],
2218 &match_length, match_positions)
2219 && match_backtrack (c,
2220 backtrackCount, backtrack,
2221 lookup_context.funcs.match, lookup_context.match_data[0],
2223 && match_lookahead (c,
2224 lookaheadCount, lookahead,
2225 lookup_context.funcs.match, lookup_context.match_data[2],
2226 match_length, &end_index)
2227 && (c->buffer->unsafe_to_break_from_outbuffer (start_index, end_index),
2229 inputCount, match_positions,
2230 lookupCount, lookupRecord,
2236 bool intersects (const hb_set_t *glyphs, ChainContextClosureLookupContext &lookup_context) const
2238 const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
2239 const ArrayOf<HBUINT16> &lookahead = StructAfter<ArrayOf<HBUINT16>> (input);
2240 return chain_context_intersects (glyphs,
2241 backtrack.len, backtrack.arrayZ,
2242 input.lenP1, input.arrayZ,
2243 lookahead.len, lookahead.arrayZ,
/* Glyph-closure: adds glyphs reachable through this rule's lookups to
 * the closure context.  Bails out early if the recursion/lookup budget
 * has been exhausted (defends against malicious fonts). */
2247 void closure (hb_closure_context_t *c,
2248 ChainContextClosureLookupContext &lookup_context) const
2250 if (unlikely (c->lookup_limit_exceeded ())) return;
/* Walk the back-to-back variable-length arrays: input, lookahead, lookups. */
2252 const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
2253 const ArrayOf<HBUINT16> &lookahead = StructAfter<ArrayOf<HBUINT16>> (input);
2254 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord>> (lookahead);
2255 chain_context_closure_lookup (c,
2256 backtrack.len, backtrack.arrayZ,
2257 input.lenP1, input.arrayZ,
2258 lookahead.len, lookahead.arrayZ,
2259 lookup.len, lookup.arrayZ,
/* Collects the lookup indices referenced by this rule's LookupRecords
 * (recursively), honoring the context's lookup limit. */
2263 void closure_lookups (hb_closure_lookups_context_t *c) const
2265 if (unlikely (c->lookup_limit_exceeded ())) return;
2267 const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
2268 const ArrayOf<HBUINT16> &lookahead = StructAfter<ArrayOf<HBUINT16>> (input);
2269 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord>> (lookahead);
2270 recurse_lookups (c, lookup.len, lookup.arrayZ);
/* Adds every glyph value this rule can match (backtrack/input/lookahead)
 * to the collect-glyphs context, via the per-format funcs in lookup_context. */
2273 void collect_glyphs (hb_collect_glyphs_context_t *c,
2274 ChainContextCollectGlyphsLookupContext &lookup_context) const
2276 const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
2277 const ArrayOf<HBUINT16> &lookahead = StructAfter<ArrayOf<HBUINT16>> (input);
2278 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord>> (lookahead);
2279 chain_context_collect_glyphs_lookup (c,
2280 backtrack.len, backtrack.arrayZ,
2281 input.lenP1, input.arrayZ,
2282 lookahead.len, lookahead.arrayZ,
2283 lookup.len, lookup.arrayZ,
/* Dry-run matcher: would this rule apply to the glyph sequence in `c`?
 * Used by hb_ot_layout_lookup_would_substitute(); no buffer mutation. */
2287 bool would_apply (hb_would_apply_context_t *c,
2288 ChainContextApplyLookupContext &lookup_context) const
2290 const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
2291 const ArrayOf<HBUINT16> &lookahead = StructAfter<ArrayOf<HBUINT16>> (input);
2292 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord>> (lookahead);
2293 return chain_context_would_apply_lookup (c,
2294 backtrack.len, backtrack.arrayZ,
2295 input.lenP1, input.arrayZ,
2296 lookahead.len, lookahead.arrayZ, lookup.len,
2297 lookup.arrayZ, lookup_context);
/* Applies this rule at the current buffer position; returns whether it
 * matched (and its lookups were applied).  Delegates to
 * chain_context_apply_lookup() with the three unpacked sequences. */
2300 bool apply (hb_ot_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
2303 const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
2304 const ArrayOf<HBUINT16> &lookahead = StructAfter<ArrayOf<HBUINT16>> (input);
2305 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord>> (lookahead);
2306 return_trace (chain_context_apply_lookup (c,
2307 backtrack.len, backtrack.arrayZ,
2308 input.lenP1, input.arrayZ,
2309 lookahead.len, lookahead.arrayZ, lookup.len,
2310 lookup.arrayZ, lookup_context));
/* Serialization helper used by copy(): writes a length-prefixed HBUINT16
 * array from an iterator of (already remapped) values.
 * NOTE(review): most of the body is elided in this listing. */
2313 template<typename Iterator,
2314 hb_requires (hb_is_iterator (Iterator))>
2315 void serialize_array (hb_serialize_context_t *c,
2320 for (const auto g : it)
/* Serializes a remapped copy of this rule for subsetting.  Each of the
 * three sequences is rewritten through a mapping: `backtrack_map` by
 * default, overridden per-sequence by `input_map`/`lookahead_map` when
 * given (Format2 passes distinct class maps; Format1 passes one glyph
 * map for all three).  Lookup indices are remapped via `lookup_map`.
 * Returns nullptr on serializer failure. */
2328 ChainRule* copy (hb_serialize_context_t *c,
2329 const hb_map_t *lookup_map,
2330 const hb_map_t *backtrack_map,
2331 const hb_map_t *input_map = nullptr,
2332 const hb_map_t *lookahead_map = nullptr) const
2334 TRACE_SERIALIZE (this);
2335 auto *out = c->start_embed (this);
2336 if (unlikely (!out)) return_trace (nullptr);
/* `mapping` starts as the backtrack map and is swapped per sequence below. */
2338 const hb_map_t *mapping = backtrack_map;
2339 serialize_array (c, backtrack.len, + backtrack.iter ()
2340 | hb_map (mapping));
2342 const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
2343 if (input_map) mapping = input_map;
2344 serialize_array (c, input.lenP1, + input.iter ()
2345 | hb_map (mapping));
2347 const ArrayOf<HBUINT16> &lookahead = StructAfter<ArrayOf<HBUINT16>> (input);
2348 if (lookahead_map) mapping = lookahead_map;
2349 serialize_array (c, lookahead.len, + lookahead.iter ()
2350 | hb_map (mapping));
/* Finally copy the LookupRecord array: count, then each record with its
 * lookup index translated through lookup_map. */
2352 const ArrayOf<LookupRecord> &lookupRecord = StructAfter<ArrayOf<LookupRecord>> (lookahead);
2353 HBUINT16 lookupCount;
2354 lookupCount = lookupRecord.len;
2355 if (!c->copy (lookupCount)) return_trace (nullptr);
2357 for (unsigned i = 0; i < (unsigned) lookupCount; i++)
2358 if (!c->copy (lookupRecord[i], lookup_map)) return_trace (nullptr);
/* Subsets this rule.  Two paths (the branch condition lines are elided
 * in this listing; presumably it keys on whether class maps were passed
 * — TODO confirm): the glyph-based path keeps the rule only if every
 * referenced glyph survives in the subset glyphset, then copies through
 * the glyph map; the class-based path does the same against the
 * backtrack/input/lookahead class maps. */
2363 bool subset (hb_subset_context_t *c,
2364 const hb_map_t *lookup_map,
2365 const hb_map_t *backtrack_map = nullptr,
2366 const hb_map_t *input_map = nullptr,
2367 const hb_map_t *lookahead_map = nullptr) const
2369 TRACE_SUBSET (this);
2371 const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
2372 const ArrayOf<HBUINT16> &lookahead = StructAfter<ArrayOf<HBUINT16>> (input);
/* Glyph-based path: drop the rule if any glyph fell out of the subset. */
2376 const hb_set_t &glyphset = *c->plan->glyphset ();
2377 if (!hb_all (backtrack, glyphset) ||
2378 !hb_all (input, glyphset) ||
2379 !hb_all (lookahead, glyphset))
2380 return_trace (false);
2382 copy (c->serializer, lookup_map, c->plan->glyph_map);
/* Class-based path: drop the rule if any class fell out of the maps. */
2386 if (!hb_all (backtrack, backtrack_map) ||
2387 !hb_all (input, input_map) ||
2388 !hb_all (lookahead, lookahead_map))
2389 return_trace (false);
2391 copy (c->serializer, lookup_map, backtrack_map, input_map, lookahead_map);
2394 return_trace (true);
/* Bounds-checks the four concatenated variable-length arrays in order;
 * each StructAfter is only taken after the previous array validated. */
2397 bool sanitize (hb_sanitize_context_t *c) const
2399 TRACE_SANITIZE (this);
2400 if (!backtrack.sanitize (c)) return_trace (false);
2401 const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
2402 if (!input.sanitize (c)) return_trace (false);
2403 const ArrayOf<HBUINT16> &lookahead = StructAfter<ArrayOf<HBUINT16>> (input);
2404 if (!lookahead.sanitize (c)) return_trace (false);
2405 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord>> (lookahead);
2406 return_trace (lookup.sanitize (c));
/* Data layout: four variable-length arrays stored back-to-back; only
 * `backtrack` has a compile-time offset, the rest are reached with
 * StructAfter (hence the X suffix: "do not access directly").
 * NOTE(review): the declared type of `backtrack` is on an elided line
 * (presumably ArrayOf<HBUINT16> — confirm against upstream). */
2411 backtrack; /* Array of backtracking values
2412 * (to be matched before the input
2414 HeadlessArrayOf<HBUINT16>
2415 inputX; /* Array of input values (start with
2418 lookaheadX; /* Array of lookahead values's (to be
2419 * matched after the input sequence) */
2420 ArrayOf<LookupRecord>
2421 lookupX; /* Array of LookupRecords--in
2424 DEFINE_SIZE_MIN (8);
/* True if any rule in the set could match within `glyphs`; offsets are
 * resolved relative to this set via hb_add(this). */
2429 bool intersects (const hb_set_t *glyphs, ChainContextClosureLookupContext &lookup_context) const
2433 | hb_map (hb_add (this))
2434 | hb_map ([&] (const ChainRule &_) { return _.intersects (glyphs, lookup_context); })
/* Runs glyph closure over every rule in the set, respecting the
 * context's lookup budget. */
2438 void closure (hb_closure_context_t *c, ChainContextClosureLookupContext &lookup_context) const
2440 if (unlikely (c->lookup_limit_exceeded ())) return;
2444 | hb_map (hb_add (this))
2445 | hb_apply ([&] (const ChainRule &_) { _.closure (c, lookup_context); })
/* Collects referenced lookup indices from every rule in the set. */
2449 void closure_lookups (hb_closure_lookups_context_t *c) const
2451 if (unlikely (c->lookup_limit_exceeded ())) return;
2455 | hb_map (hb_add (this))
2456 | hb_apply ([&] (const ChainRule &_) { _.closure_lookups (c); })
/* Collects matchable glyphs from every rule in the set. */
2460 void collect_glyphs (hb_collect_glyphs_context_t *c, ChainContextCollectGlyphsLookupContext &lookup_context) const
2464 | hb_map (hb_add (this))
2465 | hb_apply ([&] (const ChainRule &_) { _.collect_glyphs (c, lookup_context); })
/* Dry-run: true if any rule in the set would apply (no buffer mutation). */
2469 bool would_apply (hb_would_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
2473 | hb_map (hb_add (this))
2474 | hb_map ([&] (const ChainRule &_) { return _.would_apply (c, lookup_context); })
/* Tries rules in table order ("ordered by preference"); first match wins. */
2479 bool apply (hb_ot_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
2484 | hb_map (hb_add (this))
2485 | hb_map ([&] (const ChainRule &_) { return _.apply (c, lookup_context); })
/* Subsets the rule set: appends each surviving rule, reverting the
 * serializer to the per-rule snapshot when a rule is dropped, and to the
 * outer snapshot when no rule survives at all.  Returns whether the
 * emitted set is non-empty. */
2491 bool subset (hb_subset_context_t *c,
2492 const hb_map_t *lookup_map,
2493 const hb_map_t *backtrack_klass_map = nullptr,
2494 const hb_map_t *input_klass_map = nullptr,
2495 const hb_map_t *lookahead_klass_map = nullptr) const
2497 TRACE_SUBSET (this);
/* Outer snapshot: lets us undo the whole set if it ends up empty. */
2499 auto snap = c->serializer->snapshot ();
2500 auto *out = c->serializer->start_embed (*this);
2501 if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
2503 for (const OffsetTo<ChainRule>& _ : rule)
2506 auto *o = out->rule.serialize_append (c->serializer);
2507 if (unlikely (!o)) continue;
/* Per-rule snapshot: undo just this rule if its subset fails. */
2509 auto o_snap = c->serializer->snapshot ();
2510 if (!o->serialize_subset (c, _, this,
2512 backtrack_klass_map,
2514 lookahead_klass_map))
2517 c->serializer->revert (o_snap);
2521 bool ret = bool (out->rule);
2522 if (!ret) c->serializer->revert (snap);
/* Bounds-checks the rule offset array and each ChainRule it points to. */
2527 bool sanitize (hb_sanitize_context_t *c) const
2529 TRACE_SANITIZE (this);
2530 return_trace (rule.sanitize (c, this));
/* Single member: offsets to ChainRule tables, tried in order by apply(). */
2534 OffsetArrayOf<ChainRule>
2535 rule; /* Array of ChainRule tables
2536 * ordered by preference */
2538 DEFINE_SIZE_ARRAY (2, rule);
/* Chaining context lookup, Format 1: rules keyed by simple glyph
 * sequences.  A Coverage table selects the first input glyph; the
 * ChainRuleSet at the same index holds the candidate rules.  Matchers
 * are glyph-identity, so all three match_data slots are nullptr. */
2541 struct ChainContextFormat1
2543 bool intersects (const hb_set_t *glyphs) const
2545 struct ChainContextClosureLookupContext lookup_context = {
2547 {nullptr, nullptr, nullptr}
/* Pair each covered glyph with its rule set; any intersecting set suffices. */
2551 + hb_zip (this+coverage, ruleSet)
2552 | hb_filter (*glyphs, hb_first)
2553 | hb_map (hb_second)
2554 | hb_map (hb_add (this))
2555 | hb_map ([&] (const ChainRuleSet &_) { return _.intersects (glyphs, lookup_context); })
2560 void closure (hb_closure_context_t *c) const
2562 struct ChainContextClosureLookupContext lookup_context = {
2564 {nullptr, nullptr, nullptr}
2567 + hb_zip (this+coverage, ruleSet)
2568 | hb_filter (*c->glyphs, hb_first)
2569 | hb_map (hb_second)
2570 | hb_map (hb_add (this))
2571 | hb_apply ([&] (const ChainRuleSet &_) { _.closure (c, lookup_context); })
2575 void closure_lookups (hb_closure_lookups_context_t *c) const
2578 | hb_map (hb_add (this))
2579 | hb_apply ([&] (const ChainRuleSet &_) { _.closure_lookups (c); })
/* Context lookups carry no device/variation data themselves. */
2583 void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {}
2585 void collect_glyphs (hb_collect_glyphs_context_t *c) const
2587 (this+coverage).collect_coverage (c->input);
2589 struct ChainContextCollectGlyphsLookupContext lookup_context = {
2591 {nullptr, nullptr, nullptr}
2595 | hb_map (hb_add (this))
2596 | hb_apply ([&] (const ChainRuleSet &_) { _.collect_glyphs (c, lookup_context); })
2600 bool would_apply (hb_would_apply_context_t *c) const
2602 const ChainRuleSet &rule_set = this+ruleSet[(this+coverage).get_coverage (c->glyphs[0])];
2603 struct ChainContextApplyLookupContext lookup_context = {
2605 {nullptr, nullptr, nullptr}
2607 return rule_set.would_apply (c, lookup_context);
2610 const Coverage &get_coverage () const { return this+coverage; }
2612 bool apply (hb_ot_apply_context_t *c) const
2615 unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
2616 if (likely (index == NOT_COVERED)) return_trace (false);
2618 const ChainRuleSet &rule_set = this+ruleSet[index];
2619 struct ChainContextApplyLookupContext lookup_context = {
2621 {nullptr, nullptr, nullptr}
2623 return_trace (rule_set.apply (c, lookup_context));
/* Subsetting: keep only covered glyphs that survive AND whose rule set
 * subsets non-empty; rebuild Coverage from the remapped survivors. */
2626 bool subset (hb_subset_context_t *c) const
2628 TRACE_SUBSET (this);
2629 const hb_set_t &glyphset = *c->plan->glyphset ();
2630 const hb_map_t &glyph_map = *c->plan->glyph_map;
2632 auto *out = c->serializer->start_embed (*this);
2633 if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
2634 out->format = format;
2636 const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? c->plan->gsub_lookups : c->plan->gpos_lookups;
2637 hb_sorted_vector_t<hb_codepoint_t> new_coverage;
2638 + hb_zip (this+coverage, ruleSet)
2639 | hb_filter (glyphset, hb_first)
2640 | hb_filter (subset_offset_array (c, out->ruleSet, this, lookup_map), hb_second)
2642 | hb_map (glyph_map)
2643 | hb_sink (new_coverage)
2646 out->coverage.serialize (c->serializer, out)
2647 .serialize (c->serializer, new_coverage.iter ())
2648 return_trace (bool (new_coverage));
2651 bool sanitize (hb_sanitize_context_t *c) const
2653 TRACE_SANITIZE (this);
2654 return_trace (coverage.sanitize (c, this) && ruleSet.sanitize (c, this));
2658 HBUINT16 format; /* Format identifier--format = 1 */
2660 coverage; /* Offset to Coverage table--from
2661 * beginning of table */
2662 OffsetArrayOf<ChainRuleSet>
2663 ruleSet; /* Array of ChainRuleSet tables
2664 * ordered by Coverage Index */
2666 DEFINE_SIZE_ARRAY (6, ruleSet);
/* Chaining context lookup, Format 2: rules keyed by glyph classes.
 * Three independent ClassDefs classify backtrack, input and lookahead
 * glyphs; ruleSet is indexed by the class of the first input glyph. */
2669 struct ChainContextFormat2
2671 bool intersects (const hb_set_t *glyphs) const
2673 if (!(this+coverage).intersects (glyphs))
2676 const ClassDef &backtrack_class_def = this+backtrackClassDef;
2677 const ClassDef &input_class_def = this+inputClassDef;
2678 const ClassDef &lookahead_class_def = this+lookaheadClassDef;
2680 struct ChainContextClosureLookupContext lookup_context = {
2682 {&backtrack_class_def,
2684 &lookahead_class_def}
/* A rule set at index i is live only if class i itself intersects. */
2688 + hb_enumerate (ruleSet)
2689 | hb_map ([&] (const hb_pair_t<unsigned, const OffsetTo<ChainRuleSet> &> p)
2690 { return input_class_def.intersects_class (glyphs, p.first) &&
2691 (this+p.second).intersects (glyphs, lookup_context); })
2695 void closure (hb_closure_context_t *c) const
2697 if (!(this+coverage).intersects (c->glyphs))
2700 const ClassDef &backtrack_class_def = this+backtrackClassDef;
2701 const ClassDef &input_class_def = this+inputClassDef;
2702 const ClassDef &lookahead_class_def = this+lookaheadClassDef;
2704 struct ChainContextClosureLookupContext lookup_context = {
2706 {&backtrack_class_def,
2708 &lookahead_class_def}
2712 + hb_enumerate (ruleSet)
2713 | hb_filter ([&] (unsigned _)
2714 { return input_class_def.intersects_class (c->glyphs, _); },
2716 | hb_map (hb_second)
2717 | hb_map (hb_add (this))
2718 | hb_apply ([&] (const ChainRuleSet &_) { _.closure (c, lookup_context); })
2722 void closure_lookups (hb_closure_lookups_context_t *c) const
2725 | hb_map (hb_add (this))
2726 | hb_apply ([&] (const ChainRuleSet &_) { _.closure_lookups (c); })
/* Context lookups carry no device/variation data themselves. */
2730 void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {}
2732 void collect_glyphs (hb_collect_glyphs_context_t *c) const
2734 (this+coverage).collect_coverage (c->input);
2736 const ClassDef &backtrack_class_def = this+backtrackClassDef;
2737 const ClassDef &input_class_def = this+inputClassDef;
2738 const ClassDef &lookahead_class_def = this+lookaheadClassDef;
2740 struct ChainContextCollectGlyphsLookupContext lookup_context = {
2742 {&backtrack_class_def,
2744 &lookahead_class_def}
2748 | hb_map (hb_add (this))
2749 | hb_apply ([&] (const ChainRuleSet &_) { _.collect_glyphs (c, lookup_context); })
2753 bool would_apply (hb_would_apply_context_t *c) const
2755 const ClassDef &backtrack_class_def = this+backtrackClassDef;
2756 const ClassDef &input_class_def = this+inputClassDef;
2757 const ClassDef &lookahead_class_def = this+lookaheadClassDef;
/* Rule set is selected by the CLASS of the first glyph, not coverage index. */
2759 unsigned int index = input_class_def.get_class (c->glyphs[0]);
2760 const ChainRuleSet &rule_set = this+ruleSet[index];
2761 struct ChainContextApplyLookupContext lookup_context = {
2763 {&backtrack_class_def,
2765 &lookahead_class_def}
2767 return rule_set.would_apply (c, lookup_context);
2770 const Coverage &get_coverage () const { return this+coverage; }
2772 bool apply (hb_ot_apply_context_t *c) const
2775 unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
2776 if (likely (index == NOT_COVERED)) return_trace (false);
2778 const ClassDef &backtrack_class_def = this+backtrackClassDef;
2779 const ClassDef &input_class_def = this+inputClassDef;
2780 const ClassDef &lookahead_class_def = this+lookaheadClassDef;
/* Coverage only gates applicability; the class re-indexes into ruleSet. */
2782 index = input_class_def.get_class (c->buffer->cur().codepoint);
2783 const ChainRuleSet &rule_set = this+ruleSet[index];
2784 struct ChainContextApplyLookupContext lookup_context = {
2786 {&backtrack_class_def,
2788 &lookahead_class_def}
2790 return_trace (rule_set.apply (c, lookup_context));
/* Subsetting: subset the three ClassDefs first (producing old→new class
 * maps), then keep rule sets whose input class survives, finally pruning
 * empty trailing sets so the output stays compact. */
2793 bool subset (hb_subset_context_t *c) const
2795 TRACE_SUBSET (this);
2796 auto *out = c->serializer->start_embed (*this);
2797 if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
2798 out->format = format;
2799 out->coverage.serialize_subset (c, coverage, this);
2801 hb_map_t backtrack_klass_map;
2802 out->backtrackClassDef.serialize_subset (c, backtrackClassDef, this, &backtrack_klass_map);
2804 // subset inputClassDef based on glyphs survived in Coverage subsetting
2805 hb_map_t input_klass_map;
2806 out->inputClassDef.serialize_subset (c, inputClassDef, this, &input_klass_map);
2808 hb_map_t lookahead_klass_map;
2809 out->lookaheadClassDef.serialize_subset (c, lookaheadClassDef, this, &lookahead_klass_map);
/* Track the last index whose rule set serialized non-empty. */
2811 unsigned non_zero_index = 0, index = 0;
2813 const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? c->plan->gsub_lookups : c->plan->gpos_lookups;
2814 for (const OffsetTo<ChainRuleSet>& _ : + hb_enumerate (ruleSet)
2815 | hb_filter (input_klass_map, hb_first)
2816 | hb_map (hb_second))
2818 auto *o = out->ruleSet.serialize_append (c->serializer);
2824 if (o->serialize_subset (c, _, this,
2826 &backtrack_klass_map,
2828 &lookahead_klass_map))
2829 non_zero_index = index;
2834 if (!ret) return_trace (ret);
2836 //prune empty trailing ruleSets
2838 while (index > non_zero_index)
2840 out->ruleSet.pop ();
2844 return_trace (bool (out->ruleSet));
2847 bool sanitize (hb_sanitize_context_t *c) const
2849 TRACE_SANITIZE (this);
2850 return_trace (coverage.sanitize (c, this) &&
2851 backtrackClassDef.sanitize (c, this) &&
2852 inputClassDef.sanitize (c, this) &&
2853 lookaheadClassDef.sanitize (c, this) &&
2854 ruleSet.sanitize (c, this));
2858 HBUINT16 format; /* Format identifier--format = 2 */
2860 coverage; /* Offset to Coverage table--from
2861 * beginning of table */
2863 backtrackClassDef; /* Offset to glyph ClassDef table
2864 * containing backtrack sequence
2865 * data--from beginning of table */
2867 inputClassDef; /* Offset to glyph ClassDef
2868 * table containing input sequence
2869 * data--from beginning of table */
2871 lookaheadClassDef; /* Offset to glyph ClassDef table
2872 * containing lookahead sequence
2873 * data--from beginning of table */
2874 OffsetArrayOf<ChainRuleSet>
2875 ruleSet; /* Array of ChainRuleSet tables
2876 * ordered by class */
2878 DEFINE_SIZE_ARRAY (12, ruleSet);
/* Chaining context lookup, Format 3: exactly one inline rule, with a
 * Coverage table per position in backtrack/input/lookahead.  The glyph
 * arrays of the shared helpers are reused by passing the 16-bit offset
 * arrays as HBUINT16* (hence the casts); matching is by coverage.
 * input[0]'s coverage doubles as this subtable's coverage. */
2881 struct ChainContextFormat3
2883 bool intersects (const hb_set_t *glyphs) const
2885 const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage>> (backtrack);
2887 if (!(this+input[0]).intersects (glyphs))
2890 const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage>> (input);
2891 struct ChainContextClosureLookupContext lookup_context = {
2892 {intersects_coverage},
2895 return chain_context_intersects (glyphs,
2896 backtrack.len, (const HBUINT16 *) backtrack.arrayZ,
/* "+ 1" skips input[0]: the helpers treat the input headlessly. */
2897 input.len, (const HBUINT16 *) input.arrayZ + 1,
2898 lookahead.len, (const HBUINT16 *) lookahead.arrayZ,
2902 void closure (hb_closure_context_t *c) const
2904 const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage>> (backtrack);
2906 if (!(this+input[0]).intersects (c->glyphs))
2909 const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage>> (input);
2910 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord>> (lookahead);
2911 struct ChainContextClosureLookupContext lookup_context = {
2912 {intersects_coverage},
2915 chain_context_closure_lookup (c,
2916 backtrack.len, (const HBUINT16 *) backtrack.arrayZ,
2917 input.len, (const HBUINT16 *) input.arrayZ + 1,
2918 lookahead.len, (const HBUINT16 *) lookahead.arrayZ,
2919 lookup.len, lookup.arrayZ,
2923 void closure_lookups (hb_closure_lookups_context_t *c) const
2925 const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage>> (backtrack);
2926 const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage>> (input);
2927 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord>> (lookahead);
2928 recurse_lookups (c, lookup.len, lookup.arrayZ);
/* Context lookups carry no device/variation data themselves. */
2931 void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {}
2933 void collect_glyphs (hb_collect_glyphs_context_t *c) const
2935 const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage>> (backtrack);
2937 (this+input[0]).collect_coverage (c->input);
2939 const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage>> (input);
2940 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord>> (lookahead);
2941 struct ChainContextCollectGlyphsLookupContext lookup_context = {
2945 chain_context_collect_glyphs_lookup (c,
2946 backtrack.len, (const HBUINT16 *) backtrack.arrayZ,
2947 input.len, (const HBUINT16 *) input.arrayZ + 1,
2948 lookahead.len, (const HBUINT16 *) lookahead.arrayZ,
2949 lookup.len, lookup.arrayZ,
2953 bool would_apply (hb_would_apply_context_t *c) const
2955 const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage>> (backtrack);
2956 const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage>> (input);
2957 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord>> (lookahead);
2958 struct ChainContextApplyLookupContext lookup_context = {
2962 return chain_context_would_apply_lookup (c,
2963 backtrack.len, (const HBUINT16 *) backtrack.arrayZ,
2964 input.len, (const HBUINT16 *) input.arrayZ + 1,
2965 lookahead.len, (const HBUINT16 *) lookahead.arrayZ,
2966 lookup.len, lookup.arrayZ, lookup_context);
2969 const Coverage &get_coverage () const
2971 const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage>> (backtrack);
2972 return this+input[0];
2975 bool apply (hb_ot_apply_context_t *c) const
2978 const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage>> (backtrack);
2980 unsigned int index = (this+input[0]).get_coverage (c->buffer->cur().codepoint);
2981 if (likely (index == NOT_COVERED)) return_trace (false);
2983 const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage>> (input);
2984 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord>> (lookahead);
2985 struct ChainContextApplyLookupContext lookup_context = {
2989 return_trace (chain_context_apply_lookup (c,
2990 backtrack.len, (const HBUINT16 *) backtrack.arrayZ,
2991 input.len, (const HBUINT16 *) input.arrayZ + 1,
2992 lookahead.len, (const HBUINT16 *) lookahead.arrayZ,
2993 lookup.len, lookup.arrayZ, lookup_context));
/* Serializes one coverage-offset array: writes the count slot, then
 * subsets each referenced Coverage through the offset array helper. */
2996 template<typename Iterator,
2997 hb_requires (hb_is_iterator (Iterator))>
2998 bool serialize_coverage_offsets (hb_subset_context_t *c, Iterator it, const void* base) const
3000 TRACE_SERIALIZE (this);
3001 auto *out = c->serializer->start_embed<OffsetArrayOf<Coverage>> ();
3003 if (unlikely (!c->serializer->allocate_size<HBUINT16> (HBUINT16::static_size))) return_trace (false);
3006 | hb_apply (subset_offset_array (c, *out, base))
3009 return_trace (out->len);
3012 bool subset (hb_subset_context_t *c) const
3014 TRACE_SUBSET (this);
3016 auto *out = c->serializer->start_embed (this);
3017 if (unlikely (!out)) return_trace (false);
3018 if (unlikely (!c->serializer->embed (this->format))) return_trace (false);
/* Emit the three coverage arrays in layout order, then the lookup records. */
3020 if (!serialize_coverage_offsets (c, backtrack.iter (), this))
3021 return_trace (false);
3023 const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage>> (backtrack);
3024 if (!serialize_coverage_offsets (c, input.iter (), this))
3025 return_trace (false);
3027 const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage>> (input);
3028 if (!serialize_coverage_offsets (c, lookahead.iter (), this))
3029 return_trace (false);
3031 const ArrayOf<LookupRecord> &lookupRecord = StructAfter<ArrayOf<LookupRecord>> (lookahead);
3032 HBUINT16 lookupCount;
3033 lookupCount = lookupRecord.len;
3034 if (!c->serializer->copy (lookupCount)) return_trace (false);
3036 const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? c->plan->gsub_lookups : c->plan->gpos_lookups;
3037 for (unsigned i = 0; i < (unsigned) lookupCount; i++)
3038 if (!c->serializer->copy (lookupRecord[i], lookup_map)) return_trace (false);
3040 return_trace (true);
3043 bool sanitize (hb_sanitize_context_t *c) const
3045 TRACE_SANITIZE (this);
3046 if (!backtrack.sanitize (c, this)) return_trace (false);
3047 const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage>> (backtrack);
3048 if (!input.sanitize (c, this)) return_trace (false);
/* Empty input would make input[0] (our coverage) invalid. */
3049 if (!input.len) return_trace (false); /* To be consistent with Context. */
3050 const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage>> (input);
3051 if (!lookahead.sanitize (c, this)) return_trace (false);
3052 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord>> (lookahead);
3053 return_trace (lookup.sanitize (c));
3057 HBUINT16 format; /* Format identifier--format = 3 */
3058 OffsetArrayOf<Coverage>
3059 backtrack; /* Array of coverage tables
3060 * in backtracking sequence, in glyph
3062 OffsetArrayOf<Coverage>
3063 inputX ; /* Array of coverage
3064 * tables in input sequence, in glyph
3066 OffsetArrayOf<Coverage>
3067 lookaheadX; /* Array of coverage tables
3068 * in lookahead sequence, in glyph
3070 ArrayOf<LookupRecord>
3071 lookupX; /* Array of LookupRecords--in
3074 DEFINE_SIZE_MIN (10);
/* Format dispatcher for the ChainContext union: sanitizes the format
 * field via may_dispatch, then forwards to the matching subtable. */
3079 template <typename context_t, typename ...Ts>
3080 typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
3082 TRACE_DISPATCH (this, u.format);
3083 if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
3085 case 1: return_trace (c->dispatch (u.format1, hb_forward<Ts> (ds)...));
3086 case 2: return_trace (c->dispatch (u.format2, hb_forward<Ts> (ds)...));
3087 case 3: return_trace (c->dispatch (u.format3, hb_forward<Ts> (ds)...));
3088 default:return_trace (c->default_return_value ());
/* Union members: format is read first to select the active variant. */
3094 HBUINT16 format; /* Format identifier */
3095 ChainContextFormat1 format1;
3096 ChainContextFormat2 format2;
3097 ChainContextFormat3 format3;
/* Extension lookup, Format 1: indirection that lets 16-bit lookup
 * offsets reach subtables beyond 64K via a 32-bit offset, and wraps a
 * subtable of a different lookup type (T is GSUB or GPOS). */
3102 template <typename T>
3103 struct ExtensionFormat1
3105 unsigned int get_type () const { return extensionLookupType; }
3107 template <typename X>
3108 const X& get_subtable () const
/* Reinterpret the 32-bit offset as an offset to the wrapped subtable type. */
3109 { return this + reinterpret_cast<const LOffsetTo<typename T::SubTable> &> (extensionOffset); }
3111 template <typename context_t, typename ...Ts>
3112 typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
3114 TRACE_DISPATCH (this, format);
3115 if (unlikely (!c->may_dispatch (this, this))) return_trace (c->no_dispatch_return_value ());
3116 return_trace (get_subtable<typename T::SubTable> ().dispatch (c, get_type (), hb_forward<Ts> (ds)...));
3119 void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
3122 /* This is called from may_dispatch() above with hb_sanitize_context_t. */
3123 bool sanitize (hb_sanitize_context_t *c) const
3125 TRACE_SANITIZE (this);
/* An Extension wrapping another Extension would allow unbounded recursion. */
3126 return_trace (c->check_struct (this) &&
3127 extensionLookupType != T::SubTable::Extension);
3131 HBUINT16 format; /* Format identifier. Set to 1. */
3132 HBUINT16 extensionLookupType; /* Lookup type of subtable referenced
3133 * by ExtensionOffset (i.e. the
3134 * extension subtable). */
3135 Offset32 extensionOffset; /* Offset to the extension subtable,
3136 * of lookup type subtable. */
3138 DEFINE_SIZE_STATIC (8);
/* Format-dispatching wrapper over ExtensionFormat1 (only format 1 is
 * defined); falls back to Null/default values for unknown formats.
 * NOTE(review): the struct header and switch statements are on elided
 * lines of this listing. */
3141 template <typename T>
3144 unsigned int get_type () const
3147 case 1: return u.format1.get_type ();
3151 template <typename X>
3152 const X& get_subtable () const
3155 case 1: return u.format1.template get_subtable<typename T::SubTable> ();
3156 default:return Null (typename T::SubTable);
3160 template <typename context_t, typename ...Ts>
3161 typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
3163 TRACE_DISPATCH (this, u.format);
3164 if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
3166 case 1: return_trace (u.format1.dispatch (c, hb_forward<Ts> (ds)...));
3167 default:return_trace (c->default_return_value ());
3173 HBUINT16 format; /* Format identifier */
3174 ExtensionFormat1<T> format1;
/* Per-lookup apply accelerator: caches a set-digest of all glyphs the
 * lookup's coverage can touch (cheap pre-filter via may_have()) plus a
 * flat array of its subtables, so apply() skips dispatch overhead. */
3183 struct hb_ot_layout_lookup_accelerator_t
3185 template <typename TLookup>
3186 void init (const TLookup &lookup)
3189 lookup.collect_coverage (&digest);
3192 OT::hb_get_subtables_context_t c_get_subtables (subtables);
3193 lookup.dispatch (&c_get_subtables);
3195 void fini () { subtables.fini (); }
/* Fast rejection: false means no subtable of this lookup can match g. */
3197 bool may_have (hb_codepoint_t g) const
3198 { return digest.may_have (g); }
/* First subtable that applies wins; remaining subtables are skipped. */
3200 bool apply (hb_ot_apply_context_t *c) const
3202 for (unsigned int i = 0; i < subtables.length; i++)
3203 if (subtables[i].apply (c))
3209 hb_set_digest_t digest;
3210 hb_get_subtables_context_t::array_t subtables;
/* GSUBGPOS script-list accessors: thin wrappers that resolve the
 * scriptList offset and delegate to the RecordList helpers. */
3215 bool has_data () const { return version.to_int (); }
3216 unsigned int get_script_count () const
3217 { return (this+scriptList).len; }
3218 const Tag& get_script_tag (unsigned int i) const
3219 { return (this+scriptList).get_tag (i); }
3220 unsigned int get_script_tags (unsigned int start_offset,
3221 unsigned int *script_count /* IN/OUT */,
3222 hb_tag_t *script_tags /* OUT */) const
3223 { return (this+scriptList).get_tags (start_offset, script_count, script_tags); }
3224 const Script& get_script (unsigned int i) const
3225 { return (this+scriptList)[i]; }
3226 bool find_script_index (hb_tag_t tag, unsigned int *index) const
3227 { return (this+scriptList).find_index (tag, index); }
/* Feature-list accessors; NOT_FOUND_INDEX maps to HB_TAG_NONE rather
 * than indexing out of bounds. */
3229 unsigned int get_feature_count () const
3230 { return (this+featureList).len; }
3231 hb_tag_t get_feature_tag (unsigned int i) const
3232 { return i == Index::NOT_FOUND_INDEX ? HB_TAG_NONE : (this+featureList).get_tag (i); }
3233 unsigned int get_feature_tags (unsigned int start_offset,
3234 unsigned int *feature_count /* IN/OUT */,
3235 hb_tag_t *feature_tags /* OUT */) const
3236 { return (this+featureList).get_tags (start_offset, feature_count, feature_tags); }
3237 const Feature& get_feature (unsigned int i) const
3238 { return (this+featureList)[i]; }
3239 bool find_feature_index (hb_tag_t tag, unsigned int *index) const
3240 { return (this+featureList).find_index (tag, index); }
/* Lookup-list accessors, plus variations lookup by normalized coords. */
3242 unsigned int get_lookup_count () const
3243 { return (this+lookupList).len; }
3244 const Lookup& get_lookup (unsigned int i) const
3245 { return (this+lookupList)[i]; }
3247 bool find_variations_index (const int *coords, unsigned int num_coords,
3248 unsigned int *index) const
3251 *index = FeatureVariations::NOT_FOUND_INDEX;
/* featureVars only exists in table version >= 1.1; use Null otherwise. */
3254 return (version.to_int () >= 0x00010001u ? this+featureVars : Null (FeatureVariations))
3255 .find_index (coords, num_coords, index);
/* Returns the variation-substituted Feature for (feature, variations)
 * when one exists; falls back to the unsubstituted feature.
 * NOTE(review): the return-substitute lines are elided in this listing. */
3257 const Feature& get_feature_variation (unsigned int feature_index,
3258 unsigned int variations_index) const
3261 if (FeatureVariations::NOT_FOUND_INDEX != variations_index &&
3262 version.to_int () >= 0x00010001u)
3264 const Feature *feature = (this+featureVars).find_substitute (variations_index,
3270 return get_feature (feature_index);
/* Collects lookup indices reachable via FeatureVariations substitutes
 * of the given features (no-op on pre-1.1 tables, which lack featureVars). */
3273 void feature_variation_collect_lookups (const hb_set_t *feature_indexes,
3274 hb_set_t *lookup_indexes /* OUT */) const
3277 if (version.to_int () >= 0x00010001u)
3278 (this+featureVars).collect_lookups (feature_indexes, lookup_indexes);
/* Subsets the whole GSUB/GPOS table: lookups first, then features, then
 * scripts (each stage consumes the remap produced by the previous), and
 * finally featureVars for version >= 1.1.  The reinterpret_casts view the
 * generic offset members as their subset-aware list types. */
3282 template <typename TLookup>
3283 bool subset (hb_subset_layout_context_t *c) const
3285 TRACE_SUBSET (this);
3286 auto *out = c->subset_context->serializer->embed (*this);
3287 if (unlikely (!out)) return_trace (false);
3289 typedef LookupOffsetList<TLookup> TLookupList;
3290 reinterpret_cast<OffsetTo<TLookupList> &> (out->lookupList)
3291 .serialize_subset (c->subset_context,
3292 reinterpret_cast<const OffsetTo<TLookupList> &> (lookupList),
3296 reinterpret_cast<OffsetTo<RecordListOfFeature> &> (out->featureList)
3297 .serialize_subset (c->subset_context,
3298 reinterpret_cast<const OffsetTo<RecordListOfFeature> &> (featureList),
3302 out->scriptList.serialize_subset (c->subset_context,
3308 if (version.to_int () >= 0x00010001u)
3310 bool ret = out->featureVars.serialize_subset (c->subset_context, featureVars, this, c);
/* If featureVars did not survive, downgrade the output table to 1.0. */
3313 out->version.major = 1;
3314 out->version.minor = 0;
3319 return_trace (true);
/* Marks as live every feature that either has FeatureParams or touches a
 * retained lookup; then extends through FeatureVariations (>= 1.1).
 * Feature count is clamped to HB_MAX_FEATURES to bound the work. */
3322 void closure_features (const hb_map_t *lookup_indexes, /* IN */
3323 hb_set_t *feature_indexes /* OUT */) const
3325 unsigned int feature_count = hb_min (get_feature_count (), (unsigned) HB_MAX_FEATURES);
3326 for (unsigned i = 0; i < feature_count; i++)
3328 const Feature& f = get_feature (i);
/* FeatureParams-bearing features (e.g. named styles) are kept regardless. */
3329 if ((!f.featureParams.is_null ()) || f.intersects_lookup_indexes (lookup_indexes))
3330 feature_indexes->add (i);
3333 if (version.to_int () >= 0x00010001u)
3334 (this+featureVars).closure_features (lookup_indexes, feature_indexes);
/* Size of the fixed header: the optional 32-bit featureVars offset is
 * counted only for version 0x00010001+ tables, where it exists.
 * (The leading term of the sum is on an elided line.) */
3338 unsigned int get_size () const
3341 (version.to_int () >= 0x00010001u ? featureVars.static_size : 0);
/* Validate the table header and everything reachable from it.
 * TLookup selects the GSUB- or GPOS-specific lookup type; the generic
 * lookupList offset is reinterpret_cast to OffsetListOf<TLookup> so
 * each lookup sanitizes with the correct subtable formats.  Only
 * major version 1 is accepted.  featureVars is checked only when the
 * version says it exists (0x00010001+). */
3344 template <typename TLookup>
3345 bool sanitize (hb_sanitize_context_t *c) const
3347 TRACE_SANITIZE (this);
3348 typedef OffsetListOf<TLookup> TLookupList;
3349 if (unlikely (!(version.sanitize (c) &&
3350 likely (version.major == 1) &&
3351 scriptList.sanitize (c, this) &&
3352 featureList.sanitize (c, this) &&
3353 reinterpret_cast<const OffsetTo<TLookupList> &> (lookupList).sanitize (c, this))))
3354 return_trace (false);
/* Pre-1.1 tables have no featureVars member; skip the check. */
3357 if (unlikely (!(version.to_int () < 0x00010001u || featureVars.sanitize (c, this))))
3358 return_trace (false);
3361 return_trace (true);
/* Cached, sanitized reference to a face's GSUB or GPOS table (T) plus
 * one hb_ot_layout_lookup_accelerator_t per lookup.  init()/fini()
 * form a manual lifetime pair (HarfBuzz style; not RAII). */
3364 template <typename T>
3365 struct accelerator_t
3367 void init (hb_face_t *face)
/* Sanitize-and-reference the table blob from the face. */
3369 this->table = hb_sanitize_context_t ().reference_table<T> (face);
/* Known-broken fonts: drop the blob and substitute the empty table. */
3370 if (unlikely (this->table->is_blacklisted (this->table.get_blob (), face)))
3372 hb_blob_destroy (this->table.get_blob ());
3373 this->table = hb_blob_get_empty ();
3376 this->lookup_count = table->get_lookup_count ();
/* C allocation (HarfBuzz is malloc-based); zero-initialized array. */
3378 this->accels = (hb_ot_layout_lookup_accelerator_t *) calloc (this->lookup_count, sizeof (hb_ot_layout_lookup_accelerator_t));
/* On OOM, zero the count so the init/fini loops become no-ops. */
3379 if (unlikely (!this->accels))
3380 this->lookup_count = 0;
3382 for (unsigned int i = 0; i < this->lookup_count; i++)
3383 this->accels[i].init (table->get_lookup (i));
/* fini() body (signature on an elided line): release each per-lookup
 * accelerator, free the array, and drop the table reference. */
3388 for (unsigned int i = 0; i < this->lookup_count; i++)
3389 this->accels[i].fini ();
3390 free (this->accels);
3391 this->table.destroy ();
/* Owned state: blob-backed table pointer and the accelerator array. */
3394 hb_blob_ptr_t<T> table;
3395 unsigned int lookup_count;
3396 hb_ot_layout_lookup_accelerator_t *accels;
/* On-disk header layout shared by GSUB and GPOS.  DEFINE_SIZE_MIN (10)
 * covers version (4 bytes) plus the three 16-bit offsets; the 32-bit
 * featureVars offset is present only in version 0x00010001+ tables
 * (cf. get_size above). */
3400 FixedVersion<>version; /* Version of the GSUB/GPOS table--initially set
3402 OffsetTo<ScriptList>
3403 scriptList; /* ScriptList table */
3404 OffsetTo<FeatureList>
3405 featureList; /* FeatureList table */
3406 OffsetTo<LookupList>
3407 lookupList; /* LookupList table */
3408 LOffsetTo<FeatureVariations>
3409 featureVars; /* Offset to Feature Variations
3410 table--from beginning of table
3411 * (may be NULL). Introduced
3412 * in version 0x00010001. */
3414 DEFINE_SIZE_MIN (10);
3418 } /* namespace OT */
3421 #endif /* HB_OT_LAYOUT_GSUBGPOS_HH */