2 * Copyright © 2007,2008,2009,2010 Red Hat, Inc.
3 * Copyright © 2010,2012 Google, Inc.
5 * This is part of HarfBuzz, a text shaping library.
7 * Permission is hereby granted, without written agreement and without
8 * license or royalty fees, to use, copy, modify, and distribute this
9 * software and its documentation for any purpose, provided that the
10 * above copyright notice and the following two paragraphs appear in
11 * all copies of this software.
13 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
14 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
15 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
16 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
19 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
20 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
21 * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
22 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
23 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
25 * Red Hat Author(s): Behdad Esfahbod
26 * Google Author(s): Behdad Esfahbod
29 #ifndef HB_OT_LAYOUT_GSUBGPOS_HH
30 #define HB_OT_LAYOUT_GSUBGPOS_HH
33 #include "hb-buffer.hh"
36 #include "hb-ot-map.hh"
37 #include "hb-ot-layout-common.hh"
38 #include "hb-ot-layout-gdef-table.hh"
/* Dispatch context: asks each (sub)lookup whether its coverage intersects
 * the given glyph set.  Returns true as soon as any subtable intersects. */
44 struct hb_intersects_context_t :
45 hb_dispatch_context_t<hb_intersects_context_t, bool>
48 return_t dispatch (const T &obj) { return obj.intersects (this->glyphs); }
49 static return_t default_return_value () { return false; }
/* Short-circuit: stop iterating subtables once one reports an intersection. */
50 bool stop_sublookup_iteration (return_t r) const { return r; }
/* Glyph set to test against; not owned by this context. */
52 const hb_set_t *glyphs;
54 hb_intersects_context_t (const hb_set_t *glyphs_) :
/* Dispatch context: asks each subtable whether it may perform a
 * non-one-to-one substitution (e.g. multiple/ligature substitution).
 * Stops at the first subtable that answers yes. */
58 struct hb_have_non_1to1_context_t :
59 hb_dispatch_context_t<hb_have_non_1to1_context_t, bool>
62 return_t dispatch (const T &obj) { return obj.may_have_non_1to1 (); }
63 static return_t default_return_value () { return false; }
64 bool stop_sublookup_iteration (return_t r) const { return r; }
/* Dispatch context for glyph-set closure: repeatedly applies lookups to a
 * glyph set until no new glyphs are added.  Tracks per-lookup visit counts
 * and the glyph sets already processed, so that lookups are not re-visited
 * with an unchanged input set (and so runaway recursion is bounded). */
67 struct hb_closure_context_t :
68 hb_dispatch_context_t<hb_closure_context_t>
70 typedef return_t (*recurse_func_t) (hb_closure_context_t *c, unsigned lookup_index, hb_set_t *covered_seq_indicies, unsigned seq_index, unsigned end_index);
72 return_t dispatch (const T &obj) { obj.closure (this); return hb_empty_t (); }
73 static return_t default_return_value () { return hb_empty_t (); }
/* Recurse into a child lookup; bails out when the nesting budget is
 * exhausted or no recurse callback has been installed. */
74 void recurse (unsigned lookup_index, hb_set_t *covered_seq_indicies, unsigned seq_index, unsigned end_index)
76 if (unlikely (nesting_level_left == 0 || !recurse_func))
80 recurse_func (this, lookup_index, covered_seq_indicies, seq_index, end_index);
84 void reset_lookup_visit_count ()
/* True once more than HB_MAX_LOOKUP_VISIT_COUNT lookups have been visited;
 * used as a global safety valve against pathological fonts. */
87 bool lookup_limit_exceeded ()
88 { return lookup_count > HB_MAX_LOOKUP_VISIT_COUNT; }
/* Should this lookup be (re)visited?  Increments the visit counter and
 * skips lookups already done with the current glyph population. */
90 bool should_visit_lookup (unsigned int lookup_index)
92 if (lookup_count++ > HB_MAX_LOOKUP_VISIT_COUNT)
95 if (is_lookup_done (lookup_index))
/* Returns true iff this lookup has already been processed with the
 * current glyph set (tracked via population count + covered-glyph set). */
101 bool is_lookup_done (unsigned int lookup_index)
103 if (done_lookups_glyph_count->in_error () ||
104 done_lookups_glyph_set->in_error ())
107 /* Have we visited this lookup with the current set of glyphs? */
108 if (done_lookups_glyph_count->get (lookup_index) != glyphs->get_population ())
110 done_lookups_glyph_count->set (lookup_index, glyphs->get_population ());
112 if (!done_lookups_glyph_set->get (lookup_index))
114 hb_set_t* empty_set = hb_set_create ();
/* Map insertion failed: destroy the set we just created to avoid a leak. */
115 if (unlikely (!done_lookups_glyph_set->set (lookup_index, empty_set)))
117 hb_set_destroy (empty_set);
/* Glyph population changed since last visit: invalidate the cached cover. */
122 hb_set_clear (done_lookups_glyph_set->get (lookup_index));
125 hb_set_t *covered_glyph_set = done_lookups_glyph_set->get (lookup_index);
126 if (unlikely (covered_glyph_set->in_error ()))
/* Done only if the currently-active glyphs are already covered. */
128 if (parent_active_glyphs ().is_subset (*covered_glyph_set))
131 covered_glyph_set->union_ (parent_active_glyphs ());
/* Active-glyph set of the grandparent frame (one below the stack top). */
135 const hb_set_t& previous_parent_active_glyphs () {
136 if (active_glyphs_stack.length <= 1)
139 return active_glyphs_stack[active_glyphs_stack.length - 2];
/* Active-glyph set of the current (top) frame. */
142 const hb_set_t& parent_active_glyphs ()
144 if (!active_glyphs_stack)
147 return active_glyphs_stack.tail ();
/* Push a fresh active-glyph frame; caller fills it in. */
150 hb_set_t& push_cur_active_glyphs ()
152 return *active_glyphs_stack.push ();
155 bool pop_cur_done_glyphs ()
157 if (active_glyphs_stack.length < 1)
160 active_glyphs_stack.pop ();
/* Stack of active-glyph sets, one frame per nested context match. */
167 hb_vector_t<hb_set_t> active_glyphs_stack;
168 recurse_func_t recurse_func;
169 unsigned int nesting_level_left;
171 hb_closure_context_t (hb_face_t *face_,
173 hb_map_t *done_lookups_glyph_count_,
174 hb_hashmap_t<unsigned, hb_set_t *> *done_lookups_glyph_set_,
175 unsigned int nesting_level_left_ = HB_MAX_NESTING_LEVEL) :
178 recurse_func (nullptr),
179 nesting_level_left (nesting_level_left_),
180 done_lookups_glyph_count (done_lookups_glyph_count_),
181 done_lookups_glyph_set (done_lookups_glyph_set_),
/* Destructor flushes pending output glyphs into the closure set. */
185 ~hb_closure_context_t () { flush (); }
187 void set_recurse_func (recurse_func_t func) { recurse_func = func; }
191 output->del_range (face->get_num_glyphs (), HB_SET_VALUE_INVALID); /* Remove invalid glyphs. */
192 glyphs->union_ (*output);
194 active_glyphs_stack.pop ();
195 active_glyphs_stack.reset ();
/* Per-lookup glyph population at last visit (caller-owned). */
199 hb_map_t *done_lookups_glyph_count;
/* Per-lookup set of glyphs already covered at last visit (caller-owned). */
200 hb_hashmap_t<unsigned, hb_set_t *> *done_lookups_glyph_set;
201 unsigned int lookup_count;
/* Dispatch context for lookup-index closure: computes the set of lookups
 * reachable (via contextual recursion) from a seed set, marking visited
 * and inactive lookups.  Used by the subsetter. */
206 struct hb_closure_lookups_context_t :
207 hb_dispatch_context_t<hb_closure_lookups_context_t>
209 typedef return_t (*recurse_func_t) (hb_closure_lookups_context_t *c, unsigned lookup_index);
210 template <typename T>
211 return_t dispatch (const T &obj) { obj.closure_lookups (this); return hb_empty_t (); }
212 static return_t default_return_value () { return hb_empty_t (); }
/* Recurse into a referenced lookup, bounded by nesting depth, the global
 * visit limit, and the already-visited set. */
213 void recurse (unsigned lookup_index)
215 if (unlikely (nesting_level_left == 0 || !recurse_func))
218 /* Return if new lookup was recursed to before. */
219 if (lookup_limit_exceeded ()
220 || visited_lookups->in_error ()
221 || visited_lookups->has (lookup_index))
222 // Don't increment lookup count here, that will be done in the call to closure_lookups()
223 // made by recurse_func.
226 nesting_level_left--;
227 recurse_func (this, lookup_index);
228 nesting_level_left++;
231 void set_lookup_visited (unsigned lookup_index)
232 { visited_lookups->add (lookup_index); }
/* Inactive lookups have no effect for the subset glyphs and can be dropped. */
234 void set_lookup_inactive (unsigned lookup_index)
235 { inactive_lookups->add (lookup_index); }
237 bool lookup_limit_exceeded ()
239 bool ret = lookup_count > HB_MAX_LOOKUP_VISIT_COUNT;
241 DEBUG_MSG (SUBSET, nullptr, "lookup visit count limit exceeded in lookup closure!");
/* Checks the visited set; also counts this query against the visit limit. */
244 bool is_lookup_visited (unsigned lookup_index)
246 if (unlikely (lookup_count++ > HB_MAX_LOOKUP_VISIT_COUNT))
248 DEBUG_MSG (SUBSET, nullptr, "total visited lookup count %u exceeds max limit, lookup %u is dropped.",
249 lookup_count, lookup_index);
253 if (unlikely (visited_lookups->in_error ()))
256 return visited_lookups->has (lookup_index);
260 const hb_set_t *glyphs;
261 recurse_func_t recurse_func;
262 unsigned int nesting_level_left;
264 hb_closure_lookups_context_t (hb_face_t *face_,
265 const hb_set_t *glyphs_,
266 hb_set_t *visited_lookups_,
267 hb_set_t *inactive_lookups_,
268 unsigned nesting_level_left_ = HB_MAX_NESTING_LEVEL) :
271 recurse_func (nullptr),
272 nesting_level_left (nesting_level_left_),
273 visited_lookups (visited_lookups_),
274 inactive_lookups (inactive_lookups_),
277 void set_recurse_func (recurse_func_t func) { recurse_func = func; }
/* Output sets, owned by the caller. */
280 hb_set_t *visited_lookups;
281 hb_set_t *inactive_lookups;
282 unsigned int lookup_count;
/* Dispatch context: asks whether a lookup *would* apply to a fixed glyph
 * sequence (used e.g. by shapers to test ligation without mutating a
 * buffer).  Stops at the first subtable that would apply. */
285 struct hb_would_apply_context_t :
286 hb_dispatch_context_t<hb_would_apply_context_t, bool>
288 template <typename T>
289 return_t dispatch (const T &obj) { return obj.would_apply (this); }
290 static return_t default_return_value () { return false; }
291 bool stop_sublookup_iteration (return_t r) const { return r; }
/* The candidate glyph sequence; not owned. */
294 const hb_codepoint_t *glyphs;
298 hb_would_apply_context_t (hb_face_t *face_,
299 const hb_codepoint_t *glyphs_,
301 bool zero_context_) :
305 zero_context (zero_context_) {}
/* Dispatch context that collects the glyph sets a lookup reads/writes:
 * before-context, input, after-context, and output glyphs.  Any of the
 * four output sets may be the shared empty set when not requested. */
308 struct hb_collect_glyphs_context_t :
309 hb_dispatch_context_t<hb_collect_glyphs_context_t>
311 typedef return_t (*recurse_func_t) (hb_collect_glyphs_context_t *c, unsigned int lookup_index);
312 template <typename T>
313 return_t dispatch (const T &obj) { obj.collect_glyphs (this); return hb_empty_t (); }
314 static return_t default_return_value () { return hb_empty_t (); }
315 void recurse (unsigned int lookup_index)
317 if (unlikely (nesting_level_left == 0 || !recurse_func))
320 /* Note that GPOS sets recurse_func to nullptr already, so it doesn't get
321 * past the previous check. For GSUB, we only want to collect the output
322 * glyphs in the recursion. If output is not requested, we can go home now.
324 * Note further, that the above is not exactly correct. A recursed lookup
325 * is allowed to match input that is not matched in the context, but that's
326 * not how most fonts are built. It's possible to relax that and recurse
327 * with all sets here if it proves to be an issue.
330 if (output == hb_set_get_empty ())
333 /* Return if new lookup was recursed to before. */
334 if (recursed_lookups->has (lookup_index))
/* Redirect before/input/after to the shared empty set during recursion
 * so only output glyphs are collected; restored via the saved pointers. */
337 hb_set_t *old_before = before;
338 hb_set_t *old_input = input;
339 hb_set_t *old_after = after;
340 before = input = after = hb_set_get_empty ();
342 nesting_level_left--;
343 recurse_func (this, lookup_index);
344 nesting_level_left++;
350 recursed_lookups->add (lookup_index);
358 recurse_func_t recurse_func;
/* Lookups already recursed into; owned by this context (see destructor). */
359 hb_set_t *recursed_lookups;
360 unsigned int nesting_level_left;
362 hb_collect_glyphs_context_t (hb_face_t *face_,
363 hb_set_t *glyphs_before, /* OUT. May be NULL */
364 hb_set_t *glyphs_input, /* OUT. May be NULL */
365 hb_set_t *glyphs_after, /* OUT. May be NULL */
366 hb_set_t *glyphs_output, /* OUT. May be NULL */
367 unsigned int nesting_level_left_ = HB_MAX_NESTING_LEVEL) :
369 before (glyphs_before ? glyphs_before : hb_set_get_empty ()),
370 input (glyphs_input ? glyphs_input : hb_set_get_empty ()),
371 after (glyphs_after ? glyphs_after : hb_set_get_empty ()),
372 output (glyphs_output ? glyphs_output : hb_set_get_empty ()),
373 recurse_func (nullptr),
374 recursed_lookups (hb_set_create ()),
375 nesting_level_left (nesting_level_left_) {}
376 ~hb_collect_glyphs_context_t () { hb_set_destroy (recursed_lookups); }
378 void set_recurse_func (recurse_func_t func) { recurse_func = func; }
/* Dispatch context that accumulates each subtable's Coverage into `set`.
 * Parameterized on the set type so it works with digests as well as
 * full glyph sets. */
383 template <typename set_t>
384 struct hb_collect_coverage_context_t :
385 hb_dispatch_context_t<hb_collect_coverage_context_t<set_t>, const Coverage &>
387 typedef const Coverage &return_t; // Stoopid that we have to dupe this here.
388 template <typename T>
389 return_t dispatch (const T &obj) { return obj.get_coverage (); }
390 static return_t default_return_value () { return Null (Coverage); }
/* Collect each returned coverage into the target set as iteration proceeds. */
391 bool stop_sublookup_iteration (return_t r) const
393 r.collect_coverage (set);
397 hb_collect_coverage_context_t (set_t *set_) :
/* The main lookup-application context: carries font/face/buffer state,
 * lookup flags/mask, the GDEF accelerator, and the two skipping iterators
 * used to walk the buffer while honoring ignore rules.  Dispatch returns
 * true when a subtable applied. */
404 struct hb_ot_apply_context_t :
405 hb_dispatch_context_t<hb_ot_apply_context_t, bool, HB_DEBUG_APPLY>
414 #define arg1(arg) (arg) /* Remove the macro to see why it's needed! */
417 match_func (nullptr),
418 match_data (nullptr) {}
420 typedef bool (*match_func_t) (hb_codepoint_t glyph_id, const HBUINT16 &value, const void *data);
422 void set_ignore_zwnj (bool ignore_zwnj_) { ignore_zwnj = ignore_zwnj_; }
423 void set_ignore_zwj (bool ignore_zwj_) { ignore_zwj = ignore_zwj_; }
424 void set_lookup_props (unsigned int lookup_props_) { lookup_props = lookup_props_; }
425 void set_mask (hb_mask_t mask_) { mask = mask_; }
426 void set_syllable (uint8_t syllable_) { syllable = syllable_; }
427 void set_match_func (match_func_t match_func_,
428 const void *match_data_)
429 { match_func = match_func_; match_data = match_data_; }
/* Tri-state match test: rejects on mask/syllable mismatch, otherwise
 * consults the installed match_func against the expected glyph_data. */
437 may_match_t may_match (const hb_glyph_info_t &info,
438 const HBUINT16 *glyph_data) const
440 if (!(info.mask & mask) ||
441 (syllable && syllable != info.syllable ()))
445 return match_func (info.codepoint, *glyph_data, match_data) ? MATCH_YES : MATCH_NO;
/* Tri-state skip test, based on lookup_props (glyph classes) and the
 * default-ignorable / ZWJ / ZWNJ rules. */
456 may_skip_t may_skip (const hb_ot_apply_context_t *c,
457 const hb_glyph_info_t &info) const
459 if (!c->check_glyph_property (&info, lookup_props))
462 if (unlikely (_hb_glyph_info_is_default_ignorable_and_not_hidden (&info) &&
463 (ignore_zwnj || !_hb_glyph_info_is_zwnj (&info)) &&
464 (ignore_zwj || !_hb_glyph_info_is_zwj (&info))))
471 unsigned int lookup_props;
476 match_func_t match_func;
477 const void *match_data;
/* Iterator over buffer glyphs that transparently skips glyphs the current
 * lookup ignores; used for both input matching and context matching. */
480 struct skipping_iterator_t
482 void init (hb_ot_apply_context_t *c_, bool context_match = false)
485 match_glyph_data = nullptr;
486 matcher.set_match_func (nullptr, nullptr);
487 matcher.set_lookup_props (c->lookup_props);
488 /* Ignore ZWNJ if we are matching GPOS, or matching GSUB context and asked to. */
489 matcher.set_ignore_zwnj (c->table_index == 1 || (context_match && c->auto_zwnj));
490 /* Ignore ZWJ if we are matching context, or asked to. */
491 matcher.set_ignore_zwj (context_match || c->auto_zwj);
/* Context matching ignores the lookup mask (-1 == all bits). */
492 matcher.set_mask (context_match ? -1 : c->lookup_mask);
494 void set_lookup_props (unsigned int lookup_props)
496 matcher.set_lookup_props (lookup_props);
498 void set_match_func (matcher_t::match_func_t match_func_,
499 const void *match_data_,
500 const HBUINT16 glyph_data[])
502 matcher.set_match_func (match_func_, match_data_);
503 match_glyph_data = glyph_data;
506 void reset (unsigned int start_index_,
507 unsigned int num_items_)
510 num_items = num_items_;
511 end = c->buffer->len;
/* Only constrain by syllable when starting at the buffer cursor. */
512 matcher.set_syllable (start_index_ == c->buffer->idx ? c->buffer->cur().syllable () : 0);
518 if (match_glyph_data) match_glyph_data--;
521 matcher_t::may_skip_t
522 may_skip (const hb_glyph_info_t &info) const
523 { return matcher.may_skip (c, info); }
/* Advance to the next matching glyph; on failure optionally reports the
 * position up to which the buffer is unsafe to break. */
525 bool next (unsigned *unsafe_to = nullptr)
527 assert (num_items > 0);
528 while (idx + num_items < end)
531 const hb_glyph_info_t &info = c->buffer->info[idx];
533 matcher_t::may_skip_t skip = matcher.may_skip (c, info);
534 if (unlikely (skip == matcher_t::SKIP_YES))
537 matcher_t::may_match_t match = matcher.may_match (info, match_glyph_data);
538 if (match == matcher_t::MATCH_YES ||
539 (match == matcher_t::MATCH_MAYBE &&
540 skip == matcher_t::SKIP_NO))
543 if (match_glyph_data) match_glyph_data++;
547 if (skip == matcher_t::SKIP_NO)
550 *unsafe_to = idx + 1;
/* Mirror of next(): walk backwards over out_info. */
558 bool prev (unsigned *unsafe_from = nullptr)
560 assert (num_items > 0);
561 while (idx > num_items - 1)
564 const hb_glyph_info_t &info = c->buffer->out_info[idx];
566 matcher_t::may_skip_t skip = matcher.may_skip (c, info);
567 if (unlikely (skip == matcher_t::SKIP_YES))
570 matcher_t::may_match_t match = matcher.may_match (info, match_glyph_data);
571 if (match == matcher_t::MATCH_YES ||
572 (match == matcher_t::MATCH_MAYBE &&
573 skip == matcher_t::SKIP_NO))
576 if (match_glyph_data) match_glyph_data++;
580 if (skip == matcher_t::SKIP_NO)
583 *unsafe_from = hb_max (1u, idx) - 1u;
594 hb_ot_apply_context_t *c;
/* Expected-glyph array walked in lock-step with the buffer; may be null. */
596 const HBUINT16 *match_glyph_data;
598 unsigned int num_items;
603 const char *get_name () { return "APPLY"; }
604 typedef return_t (*recurse_func_t) (hb_ot_apply_context_t *c, unsigned int lookup_index);
605 template <typename T>
606 return_t dispatch (const T &obj) { return obj.apply (this); }
607 static return_t default_return_value () { return false; }
608 bool stop_sublookup_iteration (return_t r) const { return r; }
/* Recurse into a sublookup; bounded by nesting depth and the buffer's
 * global operation budget (max_ops) to stop malicious fonts. */
609 return_t recurse (unsigned int sub_lookup_index)
611 if (unlikely (nesting_level_left == 0 || !recurse_func || buffer->max_ops-- <= 0))
612 return default_return_value ();
614 nesting_level_left--;
615 bool ret = recurse_func (this, sub_lookup_index);
616 nesting_level_left++;
620 skipping_iterator_t iter_input, iter_context;
625 recurse_func_t recurse_func;
627 const VariationStore &var_store;
629 hb_direction_t direction;
630 hb_mask_t lookup_mask;
631 unsigned int table_index; /* GSUB/GPOS */
632 unsigned int lookup_index;
633 unsigned int lookup_props;
634 unsigned int nesting_level_left;
636 bool has_glyph_classes;
/* PRNG state for the `rand` feature; see random_number() below. */
641 uint32_t random_state;
644 hb_ot_apply_context_t (unsigned int table_index_,
646 hb_buffer_t *buffer_) :
647 iter_input (), iter_context (),
648 font (font_), face (font->face), buffer (buffer_),
649 recurse_func (nullptr),
651 #ifndef HB_NO_OT_LAYOUT
652 *face->table.GDEF->table
657 var_store (gdef.get_var_store ()),
658 direction (buffer_->props.direction),
660 table_index (table_index_),
661 lookup_index ((unsigned int) -1),
663 nesting_level_left (HB_MAX_NESTING_LEVEL),
664 has_glyph_classes (gdef.has_glyph_classes ()),
668 random_state (1) { init_iters (); }
672 iter_input.init (this, false);
673 iter_context.init (this, true);
/* The setters below re-init the iterators because the matchers cache
 * mask / zwj / zwnj / lookup_props state. */
676 void set_lookup_mask (hb_mask_t mask) { lookup_mask = mask; init_iters (); }
677 void set_auto_zwj (bool auto_zwj_) { auto_zwj = auto_zwj_; init_iters (); }
678 void set_auto_zwnj (bool auto_zwnj_) { auto_zwnj = auto_zwnj_; init_iters (); }
679 void set_random (bool random_) { random = random_; }
680 void set_recurse_func (recurse_func_t func) { recurse_func = func; }
681 void set_lookup_index (unsigned int lookup_index_) { lookup_index = lookup_index_; }
682 void set_lookup_props (unsigned int lookup_props_) { lookup_props = lookup_props_; init_iters (); }
/* minstd_rand linear congruential generator (deterministic across runs). */
684 uint32_t random_number ()
686 /* http://www.cplusplus.com/reference/random/minstd_rand/ */
687 random_state = random_state * 48271 % 2147483647;
/* Mark-specific half of the LookupFlag filtering rules. */
691 bool match_properties_mark (hb_codepoint_t glyph,
692 unsigned int glyph_props,
693 unsigned int match_props) const
695 /* If using mark filtering sets, the high short of
696 * match_props has the set index.
698 if (match_props & LookupFlag::UseMarkFilteringSet)
699 return gdef.mark_set_covers (match_props >> 16, glyph);
701 /* The second byte of match_props has the meaning
702 * "ignore marks of attachment type different than
703 * the attachment type specified."
705 if (match_props & LookupFlag::MarkAttachmentType)
706 return (match_props & LookupFlag::MarkAttachmentType) == (glyph_props & LookupFlag::MarkAttachmentType);
/* Returns whether a glyph passes the lookup's IgnoreBase/Ligature/Marks
 * and mark-filtering flags. */
711 bool check_glyph_property (const hb_glyph_info_t *info,
712 unsigned int match_props) const
714 hb_codepoint_t glyph = info->codepoint;
715 unsigned int glyph_props = _hb_glyph_info_get_glyph_props (info);
717 /* Not covered, if, for example, glyph class is ligature and
718 * match_props includes LookupFlags::IgnoreLigatures
720 if (glyph_props & match_props & LookupFlag::IgnoreFlags)
723 if (unlikely (glyph_props & HB_OT_LAYOUT_GLYPH_PROPS_MARK))
724 return match_properties_mark (glyph, glyph_props, match_props);
/* Update the current glyph's GDEF class props after a substitution:
 * prefer real GDEF classes, fall back to the caller's class_guess. */
729 void _set_glyph_class (hb_codepoint_t glyph_index,
730 unsigned int class_guess = 0,
731 bool ligature = false,
732 bool component = false) const
734 unsigned int props = _hb_glyph_info_get_glyph_props (&buffer->cur());
735 props |= HB_OT_LAYOUT_GLYPH_PROPS_SUBSTITUTED;
738 props |= HB_OT_LAYOUT_GLYPH_PROPS_LIGATED;
739 /* In the only place that the MULTIPLIED bit is used, Uniscribe
740 * seems to only care about the "last" transformation between
741 * Ligature and Multiple substitutions. Ie. if you ligate, expand,
742 * and ligate again, it forgives the multiplication and acts as
743 * if only ligation happened. As such, clear MULTIPLIED bit.
745 props &= ~HB_OT_LAYOUT_GLYPH_PROPS_MULTIPLIED;
748 props |= HB_OT_LAYOUT_GLYPH_PROPS_MULTIPLIED;
749 if (likely (has_glyph_classes))
751 props &= HB_OT_LAYOUT_GLYPH_PROPS_PRESERVE;
752 _hb_glyph_info_set_glyph_props (&buffer->cur(), props | gdef.get_glyph_props (glyph_index));
754 else if (class_guess)
756 props &= HB_OT_LAYOUT_GLYPH_PROPS_PRESERVE;
757 _hb_glyph_info_set_glyph_props (&buffer->cur(), props | class_guess);
760 _hb_glyph_info_set_glyph_props (&buffer->cur(), props);
/* Substitute current glyph and advance the buffer. */
763 void replace_glyph (hb_codepoint_t glyph_index) const
765 _set_glyph_class (glyph_index);
766 (void) buffer->replace_glyph (glyph_index);
/* Substitute current glyph without advancing (in-place rewrite). */
768 void replace_glyph_inplace (hb_codepoint_t glyph_index) const
770 _set_glyph_class (glyph_index);
771 buffer->cur().codepoint = glyph_index;
773 void replace_glyph_with_ligature (hb_codepoint_t glyph_index,
774 unsigned int class_guess) const
776 _set_glyph_class (glyph_index, class_guess, true);
777 (void) buffer->replace_glyph (glyph_index);
/* Emit one glyph of a multiple-substitution sequence (marks as component). */
779 void output_glyph_for_component (hb_codepoint_t glyph_index,
780 unsigned int class_guess) const
782 _set_glyph_class (glyph_index, class_guess, false, true);
783 (void) buffer->output_glyph (glyph_index);
/* Dispatch context that flattens a lookup's subtables into an array of
 * (object pointer, typed apply thunk, coverage digest) triples, so apply
 * can be driven without re-dispatching on subtable type each time. */
788 struct hb_get_subtables_context_t :
789 hb_dispatch_context_t<hb_get_subtables_context_t>
/* Type-erased trampoline: casts back to the concrete subtable type
 * and calls its apply(). */
791 template <typename Type>
792 static inline bool apply_to (const void *obj, OT::hb_ot_apply_context_t *c)
794 const Type *typed_obj = (const Type *) obj;
795 return typed_obj->apply (c);
798 typedef bool (*hb_apply_func_t) (const void *obj, OT::hb_ot_apply_context_t *c);
/* One cached subtable: object pointer + apply thunk + coverage digest. */
800 struct hb_applicable_t
802 template <typename T>
803 void init (const T &obj_, hb_apply_func_t apply_func_)
806 apply_func = apply_func_;
808 obj_.get_coverage ().collect_coverage (&digest);
/* Cheap digest test first; only call into the subtable on a possible hit. */
811 bool apply (OT::hb_ot_apply_context_t *c) const
813 return digest.may_have (c->buffer->cur().codepoint) && apply_func (obj, c);
818 hb_apply_func_t apply_func;
819 hb_set_digest_t digest;
822 typedef hb_vector_t<hb_applicable_t> array_t;
824 /* Dispatch interface. */
825 template <typename T>
826 return_t dispatch (const T &obj)
828 hb_applicable_t *entry = array.push();
829 entry->init (obj, apply_to<T>);
830 return hb_empty_t ();
832 static return_t default_return_value () { return hb_empty_t (); }
834 hb_get_subtables_context_t (array_t &array_) :
/* Function-pointer types for the Context/ChainContext machinery: each
 * operation (closure, glyph collection, matching) is parameterized by
 * how a 16-bit value is interpreted (glyph id, class, or coverage offset). */
843 typedef bool (*intersects_func_t) (const hb_set_t *glyphs, const HBUINT16 &value, const void *data);
844 typedef void (*intersected_glyphs_func_t) (const hb_set_t *glyphs, const void *data, unsigned value, hb_set_t *intersected_glyphs);
845 typedef void (*collect_glyphs_func_t) (hb_set_t *glyphs, const HBUINT16 &value, const void *data);
846 typedef bool (*match_func_t) (hb_codepoint_t glyph_id, const HBUINT16 &value, const void *data);
/* Bundles of the above, passed into the generic context routines. */
848 struct ContextClosureFuncs
850 intersects_func_t intersects;
851 intersected_glyphs_func_t intersected_glyphs;
853 struct ContextCollectGlyphsFuncs
855 collect_glyphs_func_t collect;
857 struct ContextApplyFuncs
/* intersects_* : does `value` (interpreted per format) touch the glyph set? */
863 static inline bool intersects_glyph (const hb_set_t *glyphs, const HBUINT16 &value, const void *data HB_UNUSED)
865 return glyphs->has (value);
867 static inline bool intersects_class (const hb_set_t *glyphs, const HBUINT16 &value, const void *data)
869 const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
870 return class_def.intersects_class (glyphs, value);
872 static inline bool intersects_coverage (const hb_set_t *glyphs, const HBUINT16 &value, const void *data)
/* `value` is an offset to a Coverage table, resolved relative to `data`. */
874 const Offset16To<Coverage> &coverage = (const Offset16To<Coverage>&)value;
875 return (data+coverage).intersects (glyphs);
/* intersected_*_glyphs : expand `value` into the concrete glyphs it
 * denotes, restricted to `glyphs`, accumulating into intersected_glyphs. */
879 static inline void intersected_glyph (const hb_set_t *glyphs HB_UNUSED, const void *data, unsigned value, hb_set_t *intersected_glyphs)
881 unsigned g = reinterpret_cast<const HBUINT16 *>(data)[value];
882 intersected_glyphs->add (g);
884 static inline void intersected_class_glyphs (const hb_set_t *glyphs, const void *data, unsigned value, hb_set_t *intersected_glyphs)
886 const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
887 class_def.intersected_class_glyphs (glyphs, value, intersected_glyphs);
889 static inline void intersected_coverage_glyphs (const hb_set_t *glyphs, const void *data, unsigned value, hb_set_t *intersected_glyphs)
891 Offset16To<Coverage> coverage;
893 (data+coverage).intersected_coverage_glyphs (glyphs, intersected_glyphs);
/* True iff every value in the array intersects `glyphs`, under the given
 * interpretation (glyph/class/coverage via intersects_func). */
897 static inline bool array_is_subset_of (const hb_set_t *glyphs,
899 const HBUINT16 values[],
900 intersects_func_t intersects_func,
901 const void *intersects_data)
903 for (const HBUINT16 &_ : + hb_iter (values, count))
904 if (!intersects_func (glyphs, _, intersects_data)) return false;
/* collect_* : add the glyphs denoted by `value` (per format) into `glyphs`. */
909 static inline void collect_glyph (hb_set_t *glyphs, const HBUINT16 &value, const void *data HB_UNUSED)
913 static inline void collect_class (hb_set_t *glyphs, const HBUINT16 &value, const void *data)
915 const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
916 class_def.collect_class (glyphs, value);
918 static inline void collect_coverage (hb_set_t *glyphs, const HBUINT16 &value, const void *data)
920 const Offset16To<Coverage> &coverage = (const Offset16To<Coverage>&)value;
921 (data+coverage).collect_coverage (glyphs);
/* Apply collect_func over a whole value array. */
923 static inline void collect_array (hb_collect_glyphs_context_t *c HB_UNUSED,
926 const HBUINT16 values[],
927 collect_glyphs_func_t collect_func,
928 const void *collect_data)
931 + hb_iter (values, count)
932 | hb_apply ([&] (const HBUINT16 &_) { collect_func (glyphs, _, collect_data); })
/* match_* : does a single glyph match `value`, interpreted per format? */
937 static inline bool match_glyph (hb_codepoint_t glyph_id, const HBUINT16 &value, const void *data HB_UNUSED)
939 return glyph_id == value;
941 static inline bool match_class (hb_codepoint_t glyph_id, const HBUINT16 &value, const void *data)
943 const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
944 return class_def.get_class (glyph_id) == value;
946 static inline bool match_coverage (hb_codepoint_t glyph_id, const HBUINT16 &value, const void *data)
948 const Offset16To<Coverage> &coverage = (const Offset16To<Coverage>&)value;
949 return (data+coverage).get_coverage (glyph_id) != NOT_COVERED;
/* would_apply variant of input matching: checks the fixed glyph sequence
 * in `c` against the input array, with no skipping (1:1 positions). */
952 static inline bool would_match_input (hb_would_apply_context_t *c,
953 unsigned int count, /* Including the first glyph (not matched) */
954 const HBUINT16 input[], /* Array of input values--start with second glyph */
955 match_func_t match_func,
956 const void *match_data)
/* input[] is offset by one: position i in the sequence matches input[i-1]. */
961 for (unsigned int i = 1; i < count; i++)
962 if (likely (!match_func (c->glyphs[i], input[i - 1], match_data)))
/* Match the lookup's input sequence against the buffer starting at the
 * current position, honoring skip rules; on success fills match_positions
 * (buffer indices of each matched glyph), end_position, and optionally the
 * total ligature-component count of the matched glyphs. */
967 static inline bool match_input (hb_ot_apply_context_t *c,
968 unsigned int count, /* Including the first glyph (not matched) */
969 const HBUINT16 input[], /* Array of input values--start with second glyph */
970 match_func_t match_func,
971 const void *match_data,
972 unsigned int *end_position,
973 unsigned int match_positions[HB_MAX_CONTEXT_LENGTH],
974 unsigned int *p_total_component_count = nullptr)
976 TRACE_APPLY (nullptr);
978 if (unlikely (count > HB_MAX_CONTEXT_LENGTH)) return_trace (false);
980 hb_buffer_t *buffer = c->buffer;
982 hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
983 skippy_iter.reset (buffer->idx, count - 1);
984 skippy_iter.set_match_func (match_func, match_data, input);
987 * This is perhaps the trickiest part of OpenType... Remarks:
989 * - If all components of the ligature were marks, we call this a mark ligature.
991 * - If there is no GDEF, and the ligature is NOT a mark ligature, we categorize
992 * it as a ligature glyph.
994 * - Ligatures cannot be formed across glyphs attached to different components
995 * of previous ligatures. Eg. the sequence is LAM,SHADDA,LAM,FATHA,HEH, and
996 * LAM,LAM,HEH form a ligature, leaving SHADDA,FATHA next to eachother.
997 * However, it would be wrong to ligate that SHADDA,FATHA sequence.
998 * There are a couple of exceptions to this:
1000 * o If a ligature tries ligating with marks that belong to it itself, go ahead,
1001 * assuming that the font designer knows what they are doing (otherwise it can
1002 * break Indic stuff when a matra wants to ligate with a conjunct,
1004 * o If two marks want to ligate and they belong to different components of the
1005 * same ligature glyph, and said ligature glyph is to be ignored according to
1006 * mark-filtering rules, then allow.
1007 * https://github.com/harfbuzz/harfbuzz/issues/545
1010 unsigned int total_component_count = 0;
1011 total_component_count += _hb_glyph_info_get_lig_num_comps (&buffer->cur());
/* Ligature id/component of the first (anchor) glyph; subsequent glyphs
 * are checked for consistency against these. */
1013 unsigned int first_lig_id = _hb_glyph_info_get_lig_id (&buffer->cur());
1014 unsigned int first_lig_comp = _hb_glyph_info_get_lig_comp (&buffer->cur());
/* Lazily-computed verdict on whether the base ligature may be skipped. */
1017 LIGBASE_NOT_CHECKED,
1018 LIGBASE_MAY_NOT_SKIP,
1020 } ligbase = LIGBASE_NOT_CHECKED;
1022 match_positions[0] = buffer->idx;
1023 for (unsigned int i = 1; i < count; i++)
1026 if (!skippy_iter.next (&unsafe_to))
1028 *end_position = unsafe_to;
1029 return_trace (false);
1032 match_positions[i] = skippy_iter.idx;
1034 unsigned int this_lig_id = _hb_glyph_info_get_lig_id (&buffer->info[skippy_iter.idx]);
1035 unsigned int this_lig_comp = _hb_glyph_info_get_lig_comp (&buffer->info[skippy_iter.idx]);
1037 if (first_lig_id && first_lig_comp)
1039 /* If first component was attached to a previous ligature component,
1040 * all subsequent components should be attached to the same ligature
1041 * component, otherwise we shouldn't ligate them... */
1042 if (first_lig_id != this_lig_id || first_lig_comp != this_lig_comp)
1044 /* ...unless, we are attached to a base ligature and that base
1045 * ligature is ignorable. */
1046 if (ligbase == LIGBASE_NOT_CHECKED)
/* Scan backwards in out_info for the base glyph (lig_comp == 0) of the
 * first glyph's ligature, then test whether it is skippable. */
1049 const auto *out = buffer->out_info;
1050 unsigned int j = buffer->out_len;
1051 while (j && _hb_glyph_info_get_lig_id (&out[j - 1]) == first_lig_id)
1053 if (_hb_glyph_info_get_lig_comp (&out[j - 1]) == 0)
1062 if (found && skippy_iter.may_skip (out[j]) == hb_ot_apply_context_t::matcher_t::SKIP_YES)
1063 ligbase = LIGBASE_MAY_SKIP;
1065 ligbase = LIGBASE_MAY_NOT_SKIP;
1068 if (ligbase == LIGBASE_MAY_NOT_SKIP)
1069 return_trace (false);
1074 /* If first component was NOT attached to a previous ligature component,
1075 * all subsequent components should also NOT be attached to any ligature
1076 * component, unless they are attached to the first component itself! */
1077 if (this_lig_id && this_lig_comp && (this_lig_id != first_lig_id))
1078 return_trace (false);
1081 total_component_count += _hb_glyph_info_get_lig_num_comps (&buffer->info[skippy_iter.idx]);
1084 *end_position = skippy_iter.idx + 1;
1086 if (p_total_component_count)
1087 *p_total_component_count = total_component_count;
1089 return_trace (true);
1091 static inline bool ligate_input (hb_ot_apply_context_t *c,
1092 unsigned int count, /* Including the first glyph */
// Tail of ligate_input(): replaces the matched input sequence with the
// ligature glyph `lig_glyph` and then repairs ligature ids / component
// numbers on marks inside and after the match, so GPOS mark attachment
// still finds the right positions.
// NOTE(review): the function's opening lines (and some braces/blank
// lines) are elided from this excerpt; visible code kept byte-identical.
1093 const unsigned int match_positions[HB_MAX_CONTEXT_LENGTH], /* Including the first glyph */
1094 unsigned int match_end,
1095 hb_codepoint_t lig_glyph,
1096 unsigned int total_component_count)
1098 TRACE_APPLY (nullptr);
1100 hb_buffer_t *buffer = c->buffer;
// The whole matched range becomes one cluster.
1102 buffer->merge_clusters (buffer->idx, match_end);
1104 /* - If a base and one or more marks ligate, consider that as a base, NOT
1105 * ligature, such that all following marks can still attach to it.
1106 * https://github.com/harfbuzz/harfbuzz/issues/1109
1108 * - If all components of the ligature were marks, we call this a mark ligature.
1109 * If it *is* a mark ligature, we don't allocate a new ligature id, and leave
1110 * the ligature to keep its old ligature id. This will allow it to attach to
1111 * a base ligature in GPOS. Eg. if the sequence is: LAM,LAM,SHADDA,FATHA,HEH,
1112 * and LAM,LAM,HEH for a ligature, they will leave SHADDA and FATHA with a
1113 * ligature id and component value of 2. Then if SHADDA,FATHA form a ligature
1114 * later, we don't want them to lose their ligature id/component, otherwise
1115 * GPOS will fail to correctly position the mark ligature on top of the
1116 * LAM,LAM,HEH ligature. See:
1117 * https://bugzilla.gnome.org/show_bug.cgi?id=676343
1119 * - If a ligature is formed of components that some of which are also ligatures
1120 * themselves, and those ligature components had marks attached to *their*
1121 * components, we have to attach the marks to the new ligature component
1122 * positions! Now *that*'s tricky! And these marks may be following the
1123 * last component of the whole sequence, so we should loop forward looking
1124 * for them and update them.
1126 * Eg. the sequence is LAM,LAM,SHADDA,FATHA,HEH, and the font first forms a
1127 * 'calt' ligature of LAM,HEH, leaving the SHADDA and FATHA with a ligature
1128 * id and component == 1. Now, during 'liga', the LAM and the LAM-HEH ligature
1129 * form a LAM-LAM-HEH ligature. We need to reassign the SHADDA and FATHA to
1130 * the new ligature with a component value of 2.
1132 * This in fact happened to a font... See:
1133 * https://bugzilla.gnome.org/show_bug.cgi?id=437633
// Classify the match: base-ligature = first glyph is a base and the rest
// are all marks; mark-ligature = every matched glyph is a mark.
1136 bool is_base_ligature = _hb_glyph_info_is_base_glyph (&buffer->info[match_positions[0]]);
1137 bool is_mark_ligature = _hb_glyph_info_is_mark (&buffer->info[match_positions[0]]);
1138 for (unsigned int i = 1; i < count; i++)
1139 if (!_hb_glyph_info_is_mark (&buffer->info[match_positions[i]]))
1141 is_base_ligature = false;
1142 is_mark_ligature = false;
// Only a "true" ligature (neither of the above) gets the LIGATURE glyph
// class and a freshly allocated ligature id.
1145 bool is_ligature = !is_base_ligature && !is_mark_ligature;
1147 unsigned int klass = is_ligature ? HB_OT_LAYOUT_GLYPH_PROPS_LIGATURE : 0;
1148 unsigned int lig_id = is_ligature ? _hb_allocate_lig_id (buffer) : 0;
1149 unsigned int last_lig_id = _hb_glyph_info_get_lig_id (&buffer->cur());
1150 unsigned int last_num_components = _hb_glyph_info_get_lig_num_comps (&buffer->cur());
1151 unsigned int components_so_far = last_num_components;
1155 _hb_glyph_info_set_lig_props_for_ligature (&buffer->cur(), lig_id, total_component_count);
// A non-spacing-mark first glyph is promoted to a letter so the result
// behaves as a base from here on.
1156 if (_hb_glyph_info_get_general_category (&buffer->cur()) == HB_UNICODE_GENERAL_CATEGORY_NON_SPACING_MARK)
1158 _hb_glyph_info_set_general_category (&buffer->cur(), HB_UNICODE_GENERAL_CATEGORY_OTHER_LETTER);
1161 c->replace_glyph_with_ligature (lig_glyph, klass);
// Walk the remaining matched components; glyphs between match positions
// (skipped marks) get their lig-comp renumbered into the new ligature.
1163 for (unsigned int i = 1; i < count; i++)
1165 while (buffer->idx < match_positions[i] && buffer->successful)
1169 unsigned int this_comp = _hb_glyph_info_get_lig_comp (&buffer->cur());
// (An elided condition above presumably maps comp 0 to the last
// component — TODO confirm against the full source.)
1171 this_comp = last_num_components;
1172 unsigned int new_lig_comp = components_so_far - last_num_components +
1173 hb_min (this_comp, last_num_components);
1174 _hb_glyph_info_set_lig_props_for_mark (&buffer->cur(), lig_id, new_lig_comp);
1176 (void) buffer->next_glyph ();
// Track the ligature id / component count of the component being eaten.
1179 last_lig_id = _hb_glyph_info_get_lig_id (&buffer->cur());
1180 last_num_components = _hb_glyph_info_get_lig_num_comps (&buffer->cur());
1181 components_so_far += last_num_components;
1183 /* Skip the base glyph */
// If the last component was itself part of a ligature, marks after the
// match may still reference it; renumber them too.
1187 if (!is_mark_ligature && last_lig_id)
1189 /* Re-adjust components for any marks following. */
1190 for (unsigned i = buffer->idx; i < buffer->len; ++i)
1192 if (last_lig_id != _hb_glyph_info_get_lig_id (&buffer->info[i])) break;
1194 unsigned this_comp = _hb_glyph_info_get_lig_comp (&buffer->info[i]);
1195 if (!this_comp) break;
1197 unsigned new_lig_comp = components_so_far - last_num_components +
1198 hb_min (this_comp, last_num_components);
1199 _hb_glyph_info_set_lig_props_for_mark (&buffer->info[i], lig_id, new_lig_comp);
1202 return_trace (true);
// Matches `count` backtrack glyphs (before the current position) against
// `backtrack[]` using `match_func`, walking backwards with the skipping
// iterator. On success *match_start is the index of the furthest-back
// matched glyph; on failure it is the unsafe-from position for
// unsafe-to-concat bookkeeping.
1205 static inline bool match_backtrack (hb_ot_apply_context_t *c,
1207 const HBUINT16 backtrack[],
1208 match_func_t match_func,
1209 const void *match_data,
1210 unsigned int *match_start)
1212 TRACE_APPLY (nullptr);
// Iterate from the out-buffer boundary backwards.
1214 hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_context;
1215 skippy_iter.reset (c->buffer->backtrack_len (), count);
1216 skippy_iter.set_match_func (match_func, match_data, backtrack);
1218 for (unsigned int i = 0; i < count; i++)
1220 unsigned unsafe_from;
1221 if (!skippy_iter.prev (&unsafe_from))
1223 *match_start = unsafe_from;
1224 return_trace (false);
1228 *match_start = skippy_iter.idx;
1229 return_trace (true);
// Matches `count` lookahead glyphs starting at `start_index` against
// `lookahead[]` using `match_func`. On success *end_index is one past the
// last matched glyph; on failure it is the unsafe-to position for
// unsafe-to-concat bookkeeping.
1232 static inline bool match_lookahead (hb_ot_apply_context_t *c,
1234 const HBUINT16 lookahead[],
1235 match_func_t match_func,
1236 const void *match_data,
1237 unsigned int start_index,
1238 unsigned int *end_index)
1240 TRACE_APPLY (nullptr);
// reset() takes the position *before* the first glyph to test, hence -1.
1242 hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_context;
1243 skippy_iter.reset (start_index - 1, count);
1244 skippy_iter.set_match_func (match_func, match_data, lookahead);
1246 for (unsigned int i = 0; i < count; i++)
1249 if (!skippy_iter.next (&unsafe_to))
1251 *end_index = unsafe_to;
1252 return_trace (false);
1256 *end_index = skippy_iter.idx + 1;
1257 return_trace (true);
// Interior of struct LookupRecord (header elided from this excerpt):
// a (sequenceIndex, lookupListIndex) pair as defined by the OpenType
// sequence-context formats.
// serialize: copies the record, remapping lookupListIndex through
// lookup_map (fails with INT_OVERFLOW if the new index does not fit).
1264 bool serialize (hb_serialize_context_t *c,
1265 const hb_map_t *lookup_map) const
1267 TRACE_SERIALIZE (this);
1268 auto *out = c->embed (*this);
1269 if (unlikely (!out)) return_trace (false);
1271 return_trace (c->check_assign (out->lookupListIndex, lookup_map->get (lookupListIndex), HB_SERIALIZE_ERROR_INT_OVERFLOW));
1274 bool sanitize (hb_sanitize_context_t *c) const
1276 TRACE_SANITIZE (this);
1277 return_trace (c->check_struct (this));
1280 HBUINT16 sequenceIndex; /* Index into current glyph
1281 * sequence--first glyph = 0 */
1282 HBUINT16 lookupListIndex; /* Lookup to apply to that
1283 * position--zero--based */
1285 DEFINE_SIZE_STATIC (4);
// Serializes only those LookupRecords whose lookupListIndex survives in
// lookup_map (records referencing dropped lookups are skipped). Returns
// the number of records written. NOTE(review): the function tail
// (counting/return) is elided from this excerpt.
1288 static unsigned serialize_lookuprecord_array (hb_serialize_context_t *c,
1289 const hb_array_t<const LookupRecord> lookupRecords,
1290 const hb_map_t *lookup_map)
1293 for (const LookupRecord& r : lookupRecords)
1295 if (!lookup_map->has (r.lookupListIndex))
1298 if (!r.serialize (c, lookup_map))
// The three OpenType sequence-context flavors: format 1 keys on glyph
// ids, format 2 on glyph classes, format 3 on per-position coverages.
1306 enum ContextFormat { SimpleContext = 1, ClassBasedContext = 2, CoverageBasedContext = 3 };
// Glyph-closure recursion for contextual lookups: for each LookupRecord,
// computes the set of glyphs active at its sequence position (per the
// context format) and recurses into the referenced lookup with that set
// pushed as the active-glyph set.
1308 static void context_closure_recurse_lookups (hb_closure_context_t *c,
1309 unsigned inputCount, const HBUINT16 input[],
1310 unsigned lookupCount,
1311 const LookupRecord lookupRecord[] /* Array of LookupRecords--in design order */,
1313 ContextFormat context_format,
1315 intersected_glyphs_func_t intersected_glyphs_func)
1317 hb_set_t *covered_seq_indicies = hb_set_create ();
1318 for (unsigned int i = 0; i < lookupCount; i++)
1320 unsigned seqIndex = lookupRecord[i].sequenceIndex;
1321 if (seqIndex >= inputCount) continue;
1323 bool has_pos_glyphs = false;
1324 hb_set_t pos_glyphs;
// Only compute pos_glyphs the first time a sequence index is seen.
1326 if (hb_set_is_empty (covered_seq_indicies) || !hb_set_has (covered_seq_indicies, seqIndex))
1328 has_pos_glyphs = true;
// seqIndex == 0 refers to the first (coverage/class-keyed) position;
// its glyphs come from `value` / the parent active set.
1331 switch (context_format) {
1332 case ContextFormat::SimpleContext:
1333 pos_glyphs.add (value);
1335 case ContextFormat::ClassBasedContext:
1336 intersected_glyphs_func (&c->parent_active_glyphs (), data, value, &pos_glyphs);
1338 case ContextFormat::CoverageBasedContext:
1339 pos_glyphs.set (c->parent_active_glyphs ());
// Non-first positions intersect the closure glyph set with the input
// entry at seqIndex-1 (glyph id / class / coverage offset by format).
1345 const void *input_data = input;
1346 unsigned input_value = seqIndex - 1;
1347 if (context_format != ContextFormat::SimpleContext)
1350 input_value = input[seqIndex - 1];
1353 intersected_glyphs_func (c->glyphs, input_data, input_value, &pos_glyphs);
1357 covered_seq_indicies->add (seqIndex);
// Push either the freshly computed set or the full closure set.
1358 if (has_pos_glyphs) {
1359 c->push_cur_active_glyphs () = pos_glyphs;
1361 c->push_cur_active_glyphs ().set (*c->glyphs);
1364 unsigned endIndex = inputCount;
1365 if (context_format == ContextFormat::CoverageBasedContext)
1368 c->recurse (lookupRecord[i].lookupListIndex, covered_seq_indicies, seqIndex, endIndex);
1370 c->pop_cur_done_glyphs ();
1373 hb_set_destroy (covered_seq_indicies);
// Recurses the given context (closure-lookups, collect-glyphs, ...) into
// every lookup referenced by the LookupRecord array.
1376 template <typename context_t>
1377 static inline void recurse_lookups (context_t *c,
1378 unsigned int lookupCount,
1379 const LookupRecord lookupRecord[] /* Array of LookupRecords--in design order */)
1381 for (unsigned int i = 0; i < lookupCount; i++)
1382 c->recurse (lookupRecord[i].lookupListIndex);
// Applies each LookupRecord's lookup at its matched position, then
// adjusts `match_positions` and `end` for any buffer-length changes the
// recursed lookups caused. The position bookkeeping here is delicate;
// code kept byte-identical (some lines are elided from this excerpt).
1385 static inline void apply_lookup (hb_ot_apply_context_t *c,
1386 unsigned int count, /* Including the first glyph */
1387 unsigned int match_positions[HB_MAX_CONTEXT_LENGTH], /* Including the first glyph */
1388 unsigned int lookupCount,
1389 const LookupRecord lookupRecord[], /* Array of LookupRecords--in design order */
1390 unsigned int match_end)
1392 hb_buffer_t *buffer = c->buffer;
1395 /* All positions are distance from beginning of *output* buffer.
// Rebase match positions from input-buffer to output-buffer indexing.
1398 unsigned int bl = buffer->backtrack_len ();
1399 end = bl + match_end - buffer->idx;
1401 int delta = bl - buffer->idx;
1402 /* Convert positions to new indexing. */
1403 for (unsigned int j = 0; j < count; j++)
1404 match_positions[j] += delta;
1407 for (unsigned int i = 0; i < lookupCount && buffer->successful; i++)
1409 unsigned int idx = lookupRecord[i].sequenceIndex;
1413 /* Don't recurse to ourself at same position.
1414 * Note that this test is too naive, it doesn't catch longer loops. */
1415 if (unlikely (idx == 0 && lookupRecord[i].lookupListIndex == c->lookup_index))
1418 if (unlikely (!buffer->move_to (match_positions[idx])))
1421 if (unlikely (buffer->max_ops <= 0))
// Detect buffer growth/shrinkage caused by the recursed lookup.
1424 unsigned int orig_len = buffer->backtrack_len () + buffer->lookahead_len ();
1425 if (!c->recurse (lookupRecord[i].lookupListIndex))
1428 unsigned int new_len = buffer->backtrack_len () + buffer->lookahead_len ();
1429 int delta = new_len - orig_len;
1434 /* Recursed lookup changed buffer len. Adjust.
1438 * Right now, if buffer length increased by n, we assume n new glyphs
1439 * were added right after the current position, and if buffer length
1440 * was decreased by n, we assume n match positions after the current
1441 * one where removed. The former (buffer length increased) case is
1442 * fine, but the decrease case can be improved in at least two ways,
1443 * both of which are significant:
1445 * - If recursed-to lookup is MultipleSubst and buffer length
1446 * decreased, then it's current match position that was deleted,
1447 * NOT the one after it.
1449 * - If buffer length was decreased by n, it does not necessarily
1450 * mean that n match positions where removed, as there might
1451 * have been marks and default-ignorables in the sequence. We
1452 * should instead drop match positions between current-position
1453 * and current-position + n instead. Though, am not sure which
1454 * one is better. Both cases have valid uses. Sigh.
1456 * It should be possible to construct tests for both of these cases.
1460 if (end <= int (match_positions[idx]))
1462 /* End might end up being smaller than match_positions[idx] if the recursed
1463 * lookup ended up removing many items, more than we have had matched.
1464 * Just never rewind end back and get out of here.
1465 * https://bugs.chromium.org/p/chromium/issues/detail?id=659496 */
1466 end = match_positions[idx];
1467 /* There can't be any further changes. */
1471 unsigned int next = idx + 1; /* next now is the position after the recursed lookup. */
// Positive delta: shift later match positions right (bounded by
// HB_MAX_CONTEXT_LENGTH); negative delta: drop removed positions.
1475 if (unlikely (delta + count > HB_MAX_CONTEXT_LENGTH))
1480 /* NOTE: delta is negative. */
1481 delta = hb_max (delta, (int) next - (int) count);
1486 memmove (match_positions + next + delta, match_positions + next,
1487 (count - next) * sizeof (match_positions[0]));
1491 /* Fill in new entries. */
1492 for (unsigned int j = idx + 1; j < next; j++)
1493 match_positions[j] = match_positions[j - 1] + 1;
1495 /* And fixup the rest. */
1496 for (; next < count; next++)
1497 match_positions[next] += delta;
1500 (void) buffer->move_to (end);
1505 /* Contextual lookups */
// Bundles the closure callbacks plus the format-specific data
// (glyph/class/coverage array) threaded through context closure helpers.
1507 struct ContextClosureLookupContext
1509 ContextClosureFuncs funcs;
1510 ContextFormat context_format;
1511 const void *intersects_data;
// Callbacks + data for collect_glyphs over contextual rules.
1514 struct ContextCollectGlyphsLookupContext
1516 ContextCollectGlyphsFuncs funcs;
1517 const void *collect_data;
// Match callback + data for would_apply/apply over contextual rules.
1520 struct ContextApplyLookupContext
1522 ContextApplyFuncs funcs;
1523 const void *match_data;
// True if every input position (beyond the first, which is keyed by
// coverage/class elsewhere) intersects `glyphs` per the format's
// intersects callback.
1526 static inline bool context_intersects (const hb_set_t *glyphs,
1527 unsigned int inputCount, /* Including the first glyph (not matched) */
1528 const HBUINT16 input[], /* Array of input values--start with second glyph */
1529 ContextClosureLookupContext &lookup_context)
1531 return array_is_subset_of (glyphs,
1532 inputCount ? inputCount - 1 : 0, input,
1533 lookup_context.funcs.intersects, lookup_context.intersects_data)
// If the rule's input sequence can match under the closure glyph set,
// recurse the closure into each referenced lookup.
1536 static inline void context_closure_lookup (hb_closure_context_t *c,
1537 unsigned int inputCount, /* Including the first glyph (not matched) */
1538 const HBUINT16 input[], /* Array of input values--start with second glyph */
1539 unsigned int lookupCount,
1540 const LookupRecord lookupRecord[],
1541 unsigned value, /* Index of first glyph in Coverage or Class value in ClassDef table */
1542 ContextClosureLookupContext &lookup_context)
1544 if (context_intersects (c->glyphs,
1547 context_closure_recurse_lookups (c,
1549 lookupCount, lookupRecord,
1551 lookup_context.context_format,
1552 lookup_context.intersects_data,
1553 lookup_context.funcs.intersected_glyphs);
// Collects the rule's input glyphs into c->input, then recurses into the
// referenced lookups to collect theirs.
1556 static inline void context_collect_glyphs_lookup (hb_collect_glyphs_context_t *c,
1557 unsigned int inputCount, /* Including the first glyph (not matched) */
1558 const HBUINT16 input[], /* Array of input values--start with second glyph */
1559 unsigned int lookupCount,
1560 const LookupRecord lookupRecord[],
1561 ContextCollectGlyphsLookupContext &lookup_context)
1563 collect_array (c, c->input,
1564 inputCount ? inputCount - 1 : 0, input,
1565 lookup_context.funcs.collect, lookup_context.collect_data);
1567 lookupCount, lookupRecord);
// would_apply only tests the input sequence match; the LookupRecords are
// irrelevant for this query (hence HB_UNUSED).
1570 static inline bool context_would_apply_lookup (hb_would_apply_context_t *c,
1571 unsigned int inputCount, /* Including the first glyph (not matched) */
1572 const HBUINT16 input[], /* Array of input values--start with second glyph */
1573 unsigned int lookupCount HB_UNUSED,
1574 const LookupRecord lookupRecord[] HB_UNUSED,
1575 ContextApplyLookupContext &lookup_context)
1577 return would_match_input (c,
1579 lookup_context.funcs.match, lookup_context.match_data);
// Matches the input sequence at the current buffer position; on success
// marks the span unsafe-to-break and applies the LookupRecords, on
// failure marks it unsafe-to-concat so fragment shaping stays correct.
1581 static inline bool context_apply_lookup (hb_ot_apply_context_t *c,
1582 unsigned int inputCount, /* Including the first glyph (not matched) */
1583 const HBUINT16 input[], /* Array of input values--start with second glyph */
1584 unsigned int lookupCount,
1585 const LookupRecord lookupRecord[],
1586 ContextApplyLookupContext &lookup_context)
1588 unsigned match_end = 0;
1589 unsigned match_positions[HB_MAX_CONTEXT_LENGTH];
1592 lookup_context.funcs.match, lookup_context.match_data,
1593 &match_end, match_positions))
1595 c->buffer->unsafe_to_break (c->buffer->idx, match_end);
1597 inputCount, match_positions,
1598 lookupCount, lookupRecord,
1604 c->buffer->unsafe_to_concat (c->buffer->idx, match_end);
// Rule::intersects — true if this rule's whole input sequence can match
// within `glyphs`.
1611 bool intersects (const hb_set_t *glyphs, ContextClosureLookupContext &lookup_context) const
1613 return context_intersects (glyphs,
1614 inputCount, inputZ.arrayZ,
// Rule::closure — glyph closure through this rule's LookupRecords; the
// records live immediately after the inputZ array (StructAfter).
1618 void closure (hb_closure_context_t *c, unsigned value, ContextClosureLookupContext &lookup_context) const
1620 if (unlikely (c->lookup_limit_exceeded ())) return;
1622 const UnsizedArrayOf<LookupRecord> &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
1623 (inputZ.as_array ((inputCount ? inputCount - 1 : 0)));
1624 context_closure_lookup (c,
1625 inputCount, inputZ.arrayZ,
1626 lookupCount, lookupRecord.arrayZ,
1627 value, lookup_context);
// Rule::closure_lookups — marks referenced lookups reachable, but only
// if this rule can still match the closure glyph set.
1630 void closure_lookups (hb_closure_lookups_context_t *c,
1631 ContextClosureLookupContext &lookup_context) const
1633 if (unlikely (c->lookup_limit_exceeded ())) return;
1634 if (!intersects (c->glyphs, lookup_context)) return;
1636 const UnsizedArrayOf<LookupRecord> &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
1637 (inputZ.as_array (inputCount ? inputCount - 1 : 0));
1638 recurse_lookups (c, lookupCount, lookupRecord.arrayZ);
// Rule::collect_glyphs — gathers this rule's input glyphs and recurses
// into its LookupRecords.
1641 void collect_glyphs (hb_collect_glyphs_context_t *c,
1642 ContextCollectGlyphsLookupContext &lookup_context) const
1644 const UnsizedArrayOf<LookupRecord> &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
1645 (inputZ.as_array (inputCount ? inputCount - 1 : 0));
1646 context_collect_glyphs_lookup (c,
1647 inputCount, inputZ.arrayZ,
1648 lookupCount, lookupRecord.arrayZ,
// Rule::would_apply — input-sequence match test only, no application.
1652 bool would_apply (hb_would_apply_context_t *c,
1653 ContextApplyLookupContext &lookup_context) const
1655 const UnsizedArrayOf<LookupRecord> &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
1656 (inputZ.as_array (inputCount ? inputCount - 1 : 0));
1657 return context_would_apply_lookup (c,
1658 inputCount, inputZ.arrayZ,
1659 lookupCount, lookupRecord.arrayZ,
// Rule::apply — matches and applies this rule at the current position.
1663 bool apply (hb_ot_apply_context_t *c,
1664 ContextApplyLookupContext &lookup_context) const
1667 const UnsizedArrayOf<LookupRecord> &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
1668 (inputZ.as_array (inputCount ? inputCount - 1 : 0));
1669 return_trace (context_apply_lookup (c, inputCount, inputZ.arrayZ, lookupCount, lookupRecord.arrayZ, lookup_context));
// Rule::serialize — writes a remapped copy of the rule: input values go
// through input_mapping (glyph ids or classes), LookupRecords through
// lookup_map (dropped lookups are skipped and lookupCount re-counted).
1672 bool serialize (hb_serialize_context_t *c,
1673 const hb_map_t *input_mapping, /* old->new glyphid or class mapping */
1674 const hb_map_t *lookup_map) const
1676 TRACE_SERIALIZE (this);
1677 auto *out = c->start_embed (this);
1678 if (unlikely (!c->extend_min (out))) return_trace (false);
1680 out->inputCount = inputCount;
1681 const hb_array_t<const HBUINT16> input = inputZ.as_array (inputCount - 1);
1682 for (const auto org : input)
1685 d = input_mapping->get (org);
1689 const UnsizedArrayOf<LookupRecord> &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
1690 (inputZ.as_array ((inputCount ? inputCount - 1 : 0)));
1692 unsigned count = serialize_lookuprecord_array (c, lookupRecord.as_array (lookupCount), lookup_map);
1693 return_trace (c->check_assign (out->lookupCount, count, HB_SERIALIZE_ERROR_INT_OVERFLOW));
// Rule::subset — keeps the rule only if every input value survives the
// subset (glyph_map for format 1, klass_map for format 2).
1696 bool subset (hb_subset_context_t *c,
1697 const hb_map_t *lookup_map,
1698 const hb_map_t *klass_map = nullptr) const
1700 TRACE_SUBSET (this);
1701 if (unlikely (!inputCount)) return_trace (false);
1702 const hb_array_t<const HBUINT16> input = inputZ.as_array (inputCount - 1);
1704 const hb_map_t *mapping = klass_map == nullptr ? c->plan->glyph_map : klass_map;
1705 if (!hb_all (input, mapping)) return_trace (false);
1706 return_trace (serialize (c->serializer, mapping, lookup_map));
// Rule::sanitize — range-checks the variable-length tail: inputCount-1
// HBUINT16s followed by lookupCount LookupRecords.
1710 bool sanitize (hb_sanitize_context_t *c) const
1712 TRACE_SANITIZE (this);
1713 return_trace (inputCount.sanitize (c) &&
1714 lookupCount.sanitize (c) &&
1715 c->check_range (inputZ.arrayZ,
1716 inputZ.item_size * (inputCount ? inputCount - 1 : 0) +
1717 LookupRecord::static_size * lookupCount));
// Rule wire layout: counts followed by the unsized input array; the
// LookupRecord array follows inputZ and is reached via StructAfter.
1721 HBUINT16 inputCount; /* Total number of glyphs in input
1722 * glyph sequence--includes the first
1724 HBUINT16 lookupCount; /* Number of LookupRecords */
1725 UnsizedArrayOf<HBUINT16>
1726 inputZ; /* Array of match inputs--start with
1728 /*UnsizedArrayOf<LookupRecord>
1729 lookupRecordX;*/ /* Array of LookupRecords--in
1732 DEFINE_SIZE_ARRAY (4, inputZ);
// RuleSet::intersects — any rule in the set intersects `glyphs`.
1737 bool intersects (const hb_set_t *glyphs,
1738 ContextClosureLookupContext &lookup_context) const
1742 | hb_map (hb_add (this))
1743 | hb_map ([&] (const Rule &_) { return _.intersects (glyphs, lookup_context); })
// RuleSet::closure — run glyph closure over every rule in the set.
1748 void closure (hb_closure_context_t *c, unsigned value,
1749 ContextClosureLookupContext &lookup_context) const
1751 if (unlikely (c->lookup_limit_exceeded ())) return;
1755 | hb_map (hb_add (this))
1756 | hb_apply ([&] (const Rule &_) { _.closure (c, value, lookup_context); })
// RuleSet::closure_lookups — propagate lookup reachability per rule.
1760 void closure_lookups (hb_closure_lookups_context_t *c,
1761 ContextClosureLookupContext &lookup_context) const
1763 if (unlikely (c->lookup_limit_exceeded ())) return;
1765 | hb_map (hb_add (this))
1766 | hb_apply ([&] (const Rule &_) { _.closure_lookups (c, lookup_context); })
// RuleSet::collect_glyphs — collect input glyphs from every rule.
1770 void collect_glyphs (hb_collect_glyphs_context_t *c,
1771 ContextCollectGlyphsLookupContext &lookup_context) const
1775 | hb_map (hb_add (this))
1776 | hb_apply ([&] (const Rule &_) { _.collect_glyphs (c, lookup_context); })
// RuleSet::would_apply — true if any rule would match.
1780 bool would_apply (hb_would_apply_context_t *c,
1781 ContextApplyLookupContext &lookup_context) const
1785 | hb_map (hb_add (this))
1786 | hb_map ([&] (const Rule &_) { return _.would_apply (c, lookup_context); })
// RuleSet::apply — rules are tried in preference order; first match wins.
1791 bool apply (hb_ot_apply_context_t *c,
1792 ContextApplyLookupContext &lookup_context) const
1797 | hb_map (hb_add (this))
1798 | hb_map ([&] (const Rule &_) { return _.apply (c, lookup_context); })
// RuleSet::subset — subsets each rule; a rule that fails to subset is
// reverted individually (o_snap), and the whole set is reverted (snap)
// if no rule survives.
1804 bool subset (hb_subset_context_t *c,
1805 const hb_map_t *lookup_map,
1806 const hb_map_t *klass_map = nullptr) const
1808 TRACE_SUBSET (this);
1810 auto snap = c->serializer->snapshot ();
1811 auto *out = c->serializer->start_embed (*this);
1812 if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
1814 for (const Offset16To<Rule>& _ : rule)
1817 auto o_snap = c->serializer->snapshot ();
1818 auto *o = out->rule.serialize_append (c->serializer);
1819 if (unlikely (!o)) continue;
1821 if (!o->serialize_subset (c, _, this, lookup_map, klass_map))
1824 c->serializer->revert (o_snap);
1828 bool ret = bool (out->rule);
1829 if (!ret) c->serializer->revert (snap);
// RuleSet::sanitize — validates the offset array and each Rule.
1834 bool sanitize (hb_sanitize_context_t *c) const
1836 TRACE_SANITIZE (this);
1837 return_trace (rule.sanitize (c, this));
// RuleSet wire layout: a 16-bit-count array of offsets to Rule tables.
1841 Array16OfOffset16To<Rule>
1842 rule; /* Array of Rule tables
1843 * ordered by preference */
1845 DEFINE_SIZE_ARRAY (2, rule);
// ContextFormat1: sequence context keyed by glyph ids — a Coverage table
// selects the first glyph, and ruleSet[coverage index] holds the rules.
// NOTE(review): some lines (braces, a few context fields) are elided
// from this excerpt; code kept byte-identical.
1849 struct ContextFormat1
1851 bool intersects (const hb_set_t *glyphs) const
1853 struct ContextClosureLookupContext lookup_context = {
1854 {intersects_glyph, intersected_glyph},
1855 ContextFormat::SimpleContext,
// Any covered glyph in `glyphs` whose rule set intersects → true.
1860 + hb_zip (this+coverage, ruleSet)
1861 | hb_filter (*glyphs, hb_first)
1862 | hb_map (hb_second)
1863 | hb_map (hb_add (this))
1864 | hb_map ([&] (const RuleSet &_) { return _.intersects (glyphs, lookup_context); })
// Contextual lookups can substitute at other positions → not 1:1.
1869 bool may_have_non_1to1 () const
1872 void closure (hb_closure_context_t *c) const
// Active glyphs for this lookup = parent active set ∩ coverage.
1874 hb_set_t* cur_active_glyphs = &c->push_cur_active_glyphs ();
1875 get_coverage ().intersected_coverage_glyphs (&c->previous_parent_active_glyphs (),
1878 struct ContextClosureLookupContext lookup_context = {
1879 {intersects_glyph, intersected_glyph},
1880 ContextFormat::SimpleContext,
1884 + hb_zip (this+coverage, hb_range ((unsigned) ruleSet.len))
1885 | hb_filter ([&] (hb_codepoint_t _) {
1886 return c->previous_parent_active_glyphs ().has (_);
1888 | hb_map ([&](const hb_pair_t<hb_codepoint_t, unsigned> _) { return hb_pair_t<unsigned, const RuleSet&> (_.first, this+ruleSet[_.second]); })
1889 | hb_apply ([&] (const hb_pair_t<unsigned, const RuleSet&>& _) { _.second.closure (c, _.first, lookup_context); })
1892 c->pop_cur_done_glyphs ();
1895 void closure_lookups (hb_closure_lookups_context_t *c) const
1897 struct ContextClosureLookupContext lookup_context = {
1898 {intersects_glyph, intersected_glyph},
1899 ContextFormat::SimpleContext,
1903 + hb_zip (this+coverage, ruleSet)
1904 | hb_filter (*c->glyphs, hb_first)
1905 | hb_map (hb_second)
1906 | hb_map (hb_add (this))
1907 | hb_apply ([&] (const RuleSet &_) { _.closure_lookups (c, lookup_context); })
// Contextual lookups carry no ValueRecords of their own.
1911 void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {}
1913 void collect_glyphs (hb_collect_glyphs_context_t *c) const
1915 (this+coverage).collect_coverage (c->input);
1917 struct ContextCollectGlyphsLookupContext lookup_context = {
1923 | hb_map (hb_add (this))
1924 | hb_apply ([&] (const RuleSet &_) { _.collect_glyphs (c, lookup_context); })
1928 bool would_apply (hb_would_apply_context_t *c) const
1930 const RuleSet &rule_set = this+ruleSet[(this+coverage).get_coverage (c->glyphs[0])];
1931 struct ContextApplyLookupContext lookup_context = {
1935 return rule_set.would_apply (c, lookup_context);
1938 const Coverage &get_coverage () const { return this+coverage; }
1940 bool apply (hb_ot_apply_context_t *c) const
// Coverage lookup on the current glyph gates the whole lookup.
1943 unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
1944 if (likely (index == NOT_COVERED))
1945 return_trace (false);
1947 const RuleSet &rule_set = this+ruleSet[index];
1948 struct ContextApplyLookupContext lookup_context = {
1952 return_trace (rule_set.apply (c, lookup_context));
1955 bool subset (hb_subset_context_t *c) const
1957 TRACE_SUBSET (this);
1958 const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
1959 const hb_map_t &glyph_map = *c->plan->glyph_map;
1961 auto *out = c->serializer->start_embed (*this);
1962 if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
1963 out->format = format;
1965 const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? c->plan->gsub_lookups : c->plan->gpos_lookups;
// Keep only covered glyphs retained in the subset whose rule set
// subsets successfully, then rebuild coverage over the remapped ids.
1966 hb_sorted_vector_t<hb_codepoint_t> new_coverage;
1967 + hb_zip (this+coverage, ruleSet)
1968 | hb_filter (glyphset, hb_first)
1969 | hb_filter (subset_offset_array (c, out->ruleSet, this, lookup_map), hb_second)
1971 | hb_map (glyph_map)
1972 | hb_sink (new_coverage)
1975 out->coverage.serialize_serialize (c->serializer, new_coverage.iter ());
1976 return_trace (bool (new_coverage));
1979 bool sanitize (hb_sanitize_context_t *c) const
1981 TRACE_SANITIZE (this);
1982 return_trace (coverage.sanitize (c, this) && ruleSet.sanitize (c, this));
1986 HBUINT16 format; /* Format identifier--format = 1 */
1987 Offset16To<Coverage>
1988 coverage; /* Offset to Coverage table--from
1989 * beginning of table */
1990 Array16OfOffset16To<RuleSet>
1991 ruleSet; /* Array of RuleSet tables
1992 * ordered by Coverage Index */
1994 DEFINE_SIZE_ARRAY (6, ruleSet);
// ContextFormat2: sequence context keyed by glyph classes — coverage
// gates entry, a ClassDef maps glyphs to classes, and ruleSet is indexed
// by the first glyph's class. NOTE(review): some lines are elided from
// this excerpt; code kept byte-identical.
1998 struct ContextFormat2
2000 bool intersects (const hb_set_t *glyphs) const
2002 if (!(this+coverage).intersects (glyphs))
2005 const ClassDef &class_def = this+classDef;
2007 struct ContextClosureLookupContext lookup_context = {
2008 {intersects_class, intersected_class_glyphs},
2009 ContextFormat::ClassBasedContext,
// Classes reachable through the retained coverage glyphs; a rule set
// only counts if its class both intersects and is actually covered.
2013 hb_set_t retained_coverage_glyphs;
2014 (this+coverage).intersected_coverage_glyphs (glyphs, &retained_coverage_glyphs);
2016 hb_set_t coverage_glyph_classes;
2017 class_def.intersected_classes (&retained_coverage_glyphs, &coverage_glyph_classes);
2022 | hb_map (hb_add (this))
2024 | hb_map ([&] (const hb_pair_t<unsigned, const RuleSet &> p)
2025 { return class_def.intersects_class (glyphs, p.first) &&
2026 coverage_glyph_classes.has (p.first) &&
2027 p.second.intersects (glyphs, lookup_context); })
// Contextual lookups can substitute at other positions → not 1:1.
2032 bool may_have_non_1to1 () const
2035 void closure (hb_closure_context_t *c) const
2037 if (!(this+coverage).intersects (c->glyphs))
// Active glyphs for this lookup = parent active set ∩ coverage.
2040 hb_set_t* cur_active_glyphs = &c->push_cur_active_glyphs ();
2041 get_coverage ().intersected_coverage_glyphs (&c->previous_parent_active_glyphs (),
2044 const ClassDef &class_def = this+classDef;
2046 struct ContextClosureLookupContext lookup_context = {
2047 {intersects_class, intersected_class_glyphs},
2048 ContextFormat::ClassBasedContext,
2052 + hb_enumerate (ruleSet)
2053 | hb_filter ([&] (unsigned _)
2054 { return class_def.intersects_class (&c->parent_active_glyphs (), _); },
2056 | hb_apply ([&] (const hb_pair_t<unsigned, const Offset16To<RuleSet>&> _)
2058 const RuleSet& rule_set = this+_.second;
2059 rule_set.closure (c, _.first, lookup_context);
2063 c->pop_cur_done_glyphs ();
2066 void closure_lookups (hb_closure_lookups_context_t *c) const
2068 if (!(this+coverage).intersects (c->glyphs))
2071 const ClassDef &class_def = this+classDef;
2073 struct ContextClosureLookupContext lookup_context = {
2074 {intersects_class, intersected_class_glyphs},
2075 ContextFormat::ClassBasedContext,
2080 | hb_map (hb_add (this))
2082 | hb_filter ([&] (const hb_pair_t<unsigned, const RuleSet &> p)
2083 { return class_def.intersects_class (c->glyphs, p.first); })
2084 | hb_map (hb_second)
2085 | hb_apply ([&] (const RuleSet & _)
2086 { _.closure_lookups (c, lookup_context); });
// Contextual lookups carry no ValueRecords of their own.
2089 void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {}
2091 void collect_glyphs (hb_collect_glyphs_context_t *c) const
2093 (this+coverage).collect_coverage (c->input);
2095 const ClassDef &class_def = this+classDef;
2096 struct ContextCollectGlyphsLookupContext lookup_context = {
2102 | hb_map (hb_add (this))
2103 | hb_apply ([&] (const RuleSet &_) { _.collect_glyphs (c, lookup_context); })
2107 bool would_apply (hb_would_apply_context_t *c) const
2109 const ClassDef &class_def = this+classDef;
2110 unsigned int index = class_def.get_class (c->glyphs[0]);
2111 const RuleSet &rule_set = this+ruleSet[index];
2112 struct ContextApplyLookupContext lookup_context = {
2116 return rule_set.would_apply (c, lookup_context);
2119 const Coverage &get_coverage () const { return this+coverage; }
2121 bool apply (hb_ot_apply_context_t *c) const
// Coverage gates entry; the rule set is chosen by the glyph's class.
2124 unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
2125 if (likely (index == NOT_COVERED)) return_trace (false);
2127 const ClassDef &class_def = this+classDef;
2128 index = class_def.get_class (c->buffer->cur().codepoint);
2129 const RuleSet &rule_set = this+ruleSet[index];
2130 struct ContextApplyLookupContext lookup_context = {
2134 return_trace (rule_set.apply (c, lookup_context));
2137 bool subset (hb_subset_context_t *c) const
2139 TRACE_SUBSET (this);
2140 auto *out = c->serializer->start_embed (*this);
2141 if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
2142 out->format = format;
2143 if (unlikely (!out->coverage.serialize_subset (c, coverage, this)))
2144 return_trace (false);
// klass_map: old class → new class after ClassDef subsetting.
2147 out->classDef.serialize_subset (c, classDef, this, &klass_map);
2149 const hb_set_t* glyphset = c->plan->glyphset_gsub ();
2150 hb_set_t retained_coverage_glyphs;
2151 (this+coverage).intersected_coverage_glyphs (glyphset, &retained_coverage_glyphs);
2153 hb_set_t coverage_glyph_classes;
2154 (this+classDef).intersected_classes (&retained_coverage_glyphs, &coverage_glyph_classes);
2156 const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? c->plan->gsub_lookups : c->plan->gpos_lookups;
// Track the last rule set that serialized non-empty so empty trailing
// entries can be pruned below.
2158 int non_zero_index = -1, index = 0;
2159 for (const auto& _ : + hb_enumerate (ruleSet)
2160 | hb_filter (klass_map, hb_first))
2162 auto *o = out->ruleSet.serialize_append (c->serializer);
2169 if (coverage_glyph_classes.has (_.first) &&
2170 o->serialize_subset (c, _.second, this, lookup_map, &klass_map))
2171 non_zero_index = index;
2176 if (!ret || non_zero_index == -1) return_trace (false);
2178 //prune empty trailing ruleSets
2180 while (index > non_zero_index)
2182 out->ruleSet.pop ();
2186 return_trace (bool (out->ruleSet));
2189 bool sanitize (hb_sanitize_context_t *c) const
2191 TRACE_SANITIZE (this);
2192 return_trace (coverage.sanitize (c, this) && classDef.sanitize (c, this) && ruleSet.sanitize (c, this));
2196 HBUINT16 format; /* Format identifier--format = 2 */
2197 Offset16To<Coverage>
2198 coverage; /* Offset to Coverage table--from
2199 * beginning of table */
2200 Offset16To<ClassDef>
2201 classDef; /* Offset to glyph ClassDef table--from
2202 * beginning of table */
2203 Array16OfOffset16To<RuleSet>
2204 ruleSet; /* Array of RuleSet tables
2205 * ordered by class */
2207 DEFINE_SIZE_ARRAY (8, ruleSet);
2211 struct ContextFormat3
2213 bool intersects (const hb_set_t *glyphs) const
2215 if (!(this+coverageZ[0]).intersects (glyphs))
2218 struct ContextClosureLookupContext lookup_context = {
2219 {intersects_coverage, intersected_coverage_glyphs},
2220 ContextFormat::CoverageBasedContext,
2223 return context_intersects (glyphs,
2224 glyphCount, (const HBUINT16 *) (coverageZ.arrayZ + 1),
2228 bool may_have_non_1to1 () const
2231 void closure (hb_closure_context_t *c) const
2233 if (!(this+coverageZ[0]).intersects (c->glyphs))
2236 hb_set_t* cur_active_glyphs = &c->push_cur_active_glyphs ();
2237 get_coverage ().intersected_coverage_glyphs (&c->previous_parent_active_glyphs (),
2241 const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
2242 struct ContextClosureLookupContext lookup_context = {
2243 {intersects_coverage, intersected_coverage_glyphs},
2244 ContextFormat::CoverageBasedContext,
2247 context_closure_lookup (c,
2248 glyphCount, (const HBUINT16 *) (coverageZ.arrayZ + 1),
2249 lookupCount, lookupRecord,
2252 c->pop_cur_done_glyphs ();
2255 void closure_lookups (hb_closure_lookups_context_t *c) const
2257 if (!intersects (c->glyphs))
2259 const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
2260 recurse_lookups (c, lookupCount, lookupRecord);
2263 void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {}
2265 void collect_glyphs (hb_collect_glyphs_context_t *c) const
2267 (this+coverageZ[0]).collect_coverage (c->input);
2269 const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
2270 struct ContextCollectGlyphsLookupContext lookup_context = {
2275 context_collect_glyphs_lookup (c,
2276 glyphCount, (const HBUINT16 *) (coverageZ.arrayZ + 1),
2277 lookupCount, lookupRecord,
2281 bool would_apply (hb_would_apply_context_t *c) const
2283 const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
2284 struct ContextApplyLookupContext lookup_context = {
2288 return context_would_apply_lookup (c,
2289 glyphCount, (const HBUINT16 *) (coverageZ.arrayZ + 1),
2290 lookupCount, lookupRecord,
2294 const Coverage &get_coverage () const { return this+coverageZ[0]; }
/* Runtime application. `likely (index == NOT_COVERED)` is deliberate HarfBuzz
 * style: most glyphs are not covered by any given lookup, so the early-out is
 * the hot path. */
2296 bool apply (hb_ot_apply_context_t *c) const
2299 unsigned int index = (this+coverageZ[0]).get_coverage (c->buffer->cur().codepoint);
2300 if (likely (index == NOT_COVERED)) return_trace (false);
2302 const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
2303 struct ContextApplyLookupContext lookup_context = {
2307 return_trace (context_apply_lookup (c, glyphCount, (const HBUINT16 *) (coverageZ.arrayZ + 1), lookupCount, lookupRecord, lookup_context));
/* Subsetter: copies the fixed header, subsets each coverage offset in place,
 * then remaps and re-serializes the trailing LookupRecord array. Lookup
 * indices are remapped through the plan's GSUB/GPOS lookup map. */
2310 bool subset (hb_subset_context_t *c) const
2312 TRACE_SUBSET (this);
2313 auto *out = c->serializer->start_embed (this);
2314 if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
2316 out->format = format;
2317 out->glyphCount = glyphCount;
2319 auto coverages = coverageZ.as_array (glyphCount);
2321 for (const Offset16To<Coverage>& offset : coverages)
2323 /* TODO(subset) This looks like should not be necessary to write this way. */
2324 auto *o = c->serializer->allocate_size<Offset16To<Coverage>> (Offset16To<Coverage>::static_size);
2325 if (unlikely (!o)) return_trace (false);
2326 if (!o->serialize_subset (c, offset, this)) return_trace (false);
2329 const UnsizedArrayOf<LookupRecord>& lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>> (coverageZ.as_array (glyphCount));
2330 const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? c->plan->gsub_lookups : c->plan->gpos_lookups;
2333 unsigned count = serialize_lookuprecord_array (c->serializer, lookupRecord.as_array (lookupCount), lookup_map);
/* check_assign guards against the surviving record count overflowing HBUINT16. */
2334 return_trace (c->serializer->check_assign (out->lookupCount, count, HB_SERIALIZE_ERROR_INT_OVERFLOW));
/* Bounds-checks the header, every coverage offset, and the trailing
 * LookupRecord array. glyphCount == 0 is rejected so coverageZ[0] is
 * always safely addressable elsewhere. */
2337 bool sanitize (hb_sanitize_context_t *c) const
2339 TRACE_SANITIZE (this);
2340 if (!c->check_struct (this)) return_trace (false);
2341 unsigned int count = glyphCount;
2342 if (!count) return_trace (false); /* We want to access coverageZ[0] freely. */
2343 if (!c->check_array (coverageZ.arrayZ, count)) return_trace (false);
2344 for (unsigned int i = 0; i < count; i++)
2345 if (!coverageZ[i].sanitize (c, this)) return_trace (false);
2346 const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
2347 return_trace (c->check_array (lookupRecord, lookupCount));
/* Binary layout of ContextFormat3 (OpenType Sequence Context Format 3).
 * LookupRecords follow coverageZ in memory; reached via StructAfter. */
2351 HBUINT16 format; /* Format identifier--format = 3 */
2352 HBUINT16 glyphCount; /* Number of glyphs in the input glyph
2354 HBUINT16 lookupCount; /* Number of LookupRecords */
2355 UnsizedArrayOf<Offset16To<Coverage>>
2356 coverageZ; /* Array of offsets to Coverage
2357 * table in glyph sequence order */
2358 /*UnsizedArrayOf<LookupRecord>
2359 lookupRecordX;*/ /* Array of LookupRecords--in
2362 DEFINE_SIZE_ARRAY (6, coverageZ);
/* Format dispatcher for the Context union: routes the context object to the
 * member matching the on-disk `format` value; unknown formats return the
 * context's default value rather than failing. */
2367 template <typename context_t, typename ...Ts>
2368 typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
2370 TRACE_DISPATCH (this, u.format);
2371 if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
2373 case 1: return_trace (c->dispatch (u.format1, std::forward<Ts> (ds)...));
2374 case 2: return_trace (c->dispatch (u.format2, std::forward<Ts> (ds)...));
2375 case 3: return_trace (c->dispatch (u.format3, std::forward<Ts> (ds)...));
2376 default:return_trace (c->default_return_value ());
/* Discriminated union over the three Sequence Context subtable formats;
 * `format` aliases the first field of each variant. */
2382 HBUINT16 format; /* Format identifier */
2383 ContextFormat1 format1;
2384 ContextFormat2 format2;
2385 ContextFormat3 format3;
2390 /* Chaining Contextual lookups */
/* Closure/intersection driver for chained contexts. intersects_data holds the
 * per-sequence lookup data (ClassDefs, Coverages, or null for plain glyph ids)
 * indexed [0]=backtrack, [1]=input, [2]=lookahead. */
2392 struct ChainContextClosureLookupContext
2394 ContextClosureFuncs funcs;
2395 ContextFormat context_format;
2396 const void *intersects_data[3];
/* Glyph-collection driver; collect_data is indexed [0]=backtrack, [1]=input,
 * [2]=lookahead, matching the three sequences of a chained rule. */
2399 struct ChainContextCollectGlyphsLookupContext
2401 ContextCollectGlyphsFuncs funcs;
2402 const void *collect_data[3];
/* Matching driver used at apply time; match_data indexed [0]=backtrack,
 * [1]=input, [2]=lookahead. */
2405 struct ChainContextApplyLookupContext
2407 ContextApplyFuncs funcs;
2408 const void *match_data[3];
/* True iff every value in all three sequences (backtrack, input minus its
 * implicit first glyph, lookahead) can be satisfied by `glyphs`, using the
 * per-sequence intersects function/data from lookup_context. */
2411 static inline bool chain_context_intersects (const hb_set_t *glyphs,
2412 unsigned int backtrackCount,
2413 const HBUINT16 backtrack[],
2414 unsigned int inputCount, /* Including the first glyph (not matched) */
2415 const HBUINT16 input[], /* Array of input values--start with second glyph */
2416 unsigned int lookaheadCount,
2417 const HBUINT16 lookahead[],
2418 ChainContextClosureLookupContext &lookup_context)
2420 return array_is_subset_of (glyphs,
2421 backtrackCount, backtrack,
2422 lookup_context.funcs.intersects, lookup_context.intersects_data[0])
2423 && array_is_subset_of (glyphs,
/* inputCount includes the unmatched first glyph, hence the -1. */
2424 inputCount ? inputCount - 1 : 0, input,
2425 lookup_context.funcs.intersects, lookup_context.intersects_data[1])
2426 && array_is_subset_of (glyphs,
2427 lookaheadCount, lookahead,
2428 lookup_context.funcs.intersects, lookup_context.intersects_data[2]);
/* Closure step for one chained rule: only if the whole rule can match within
 * c->glyphs does it recurse into the rule's lookup records (passing the input
 * sequence data so the recursion can narrow the active glyph set). */
2431 static inline void chain_context_closure_lookup (hb_closure_context_t *c,
2432 unsigned int backtrackCount,
2433 const HBUINT16 backtrack[],
2434 unsigned int inputCount, /* Including the first glyph (not matched) */
2435 const HBUINT16 input[], /* Array of input values--start with second glyph */
2436 unsigned int lookaheadCount,
2437 const HBUINT16 lookahead[],
2438 unsigned int lookupCount,
2439 const LookupRecord lookupRecord[],
2441 ChainContextClosureLookupContext &lookup_context)
2443 if (chain_context_intersects (c->glyphs,
2444 backtrackCount, backtrack,
2446 lookaheadCount, lookahead,
2448 context_closure_recurse_lookups (c,
2450 lookupCount, lookupRecord,
2452 lookup_context.context_format,
/* intersects_data[1] == input-sequence data (ClassDef/Coverage/null). */
2453 lookup_context.intersects_data[1],
2454 lookup_context.funcs.intersected_glyphs);
/* Collects all glyphs a chained rule can touch: backtrack values into
 * c->before, input (minus implicit first glyph) into c->input, lookahead into
 * c->after; then recurses into the rule's lookups. */
2457 static inline void chain_context_collect_glyphs_lookup (hb_collect_glyphs_context_t *c,
2458 unsigned int backtrackCount,
2459 const HBUINT16 backtrack[],
2460 unsigned int inputCount, /* Including the first glyph (not matched) */
2461 const HBUINT16 input[], /* Array of input values--start with second glyph */
2462 unsigned int lookaheadCount,
2463 const HBUINT16 lookahead[],
2464 unsigned int lookupCount,
2465 const LookupRecord lookupRecord[],
2466 ChainContextCollectGlyphsLookupContext &lookup_context)
2468 collect_array (c, c->before,
2469 backtrackCount, backtrack,
2470 lookup_context.funcs.collect, lookup_context.collect_data[0]);
2471 collect_array (c, c->input,
2472 inputCount ? inputCount - 1 : 0, input,
2473 lookup_context.funcs.collect, lookup_context.collect_data[1]);
2474 collect_array (c, c->after,
2475 lookaheadCount, lookahead,
2476 lookup_context.funcs.collect, lookup_context.collect_data[2]);
2478 lookupCount, lookupRecord);
/* would_apply only has the input glyph sequence available, so backtrack and
 * lookahead cannot be tested (hence HB_UNUSED). With zero_context set, a rule
 * that needs any context beyond the input sequence is rejected outright. */
2481 static inline bool chain_context_would_apply_lookup (hb_would_apply_context_t *c,
2482 unsigned int backtrackCount,
2483 const HBUINT16 backtrack[] HB_UNUSED,
2484 unsigned int inputCount, /* Including the first glyph (not matched) */
2485 const HBUINT16 input[], /* Array of input values--start with second glyph */
2486 unsigned int lookaheadCount,
2487 const HBUINT16 lookahead[] HB_UNUSED,
2488 unsigned int lookupCount HB_UNUSED,
2489 const LookupRecord lookupRecord[] HB_UNUSED,
2490 ChainContextApplyLookupContext &lookup_context)
2492 return (c->zero_context ? !backtrackCount && !lookaheadCount : true)
2493 && would_match_input (c,
2495 lookup_context.funcs.match, lookup_context.match_data[1]);
/* Full runtime match-and-apply for a chained rule. Order matters: input is
 * matched first, then lookahead, and only then backtrack (which runs against
 * the out-buffer). On failure the matched span is flagged unsafe-to-concat so
 * the shaper won't cache across it; on success unsafe-to-break is recorded
 * before the nested lookups are applied at match_positions. */
2498 static inline bool chain_context_apply_lookup (hb_ot_apply_context_t *c,
2499 unsigned int backtrackCount,
2500 const HBUINT16 backtrack[],
2501 unsigned int inputCount, /* Including the first glyph (not matched) */
2502 const HBUINT16 input[], /* Array of input values--start with second glyph */
2503 unsigned int lookaheadCount,
2504 const HBUINT16 lookahead[],
2505 unsigned int lookupCount,
2506 const LookupRecord lookupRecord[],
2507 ChainContextApplyLookupContext &lookup_context)
2509 unsigned end_index = c->buffer->idx;
2510 unsigned match_end = 0;
2511 unsigned match_positions[HB_MAX_CONTEXT_LENGTH];
/* `(end_index = match_end)` is an intentional assignment inside the condition:
 * it records how far the input match reached before trying lookahead. */
2512 if (!(match_input (c,
2514 lookup_context.funcs.match, lookup_context.match_data[1],
2515 &match_end, match_positions) && (end_index = match_end)
2516 && match_lookahead (c,
2517 lookaheadCount, lookahead,
2518 lookup_context.funcs.match, lookup_context.match_data[2],
2519 match_end, &end_index)))
2521 c->buffer->unsafe_to_concat (c->buffer->idx, end_index);
2525 unsigned start_index = c->buffer->out_len;
2526 if (!match_backtrack (c,
2527 backtrackCount, backtrack,
2528 lookup_context.funcs.match, lookup_context.match_data[0],
2531 c->buffer->unsafe_to_concat_from_outbuffer (start_index, end_index);
2535 c->buffer->unsafe_to_break_from_outbuffer (start_index, end_index);
2537 inputCount, match_positions,
2538 lookupCount, lookupRecord,
/* A ChainRule's three variable-length sequences are laid out back-to-back;
 * input and lookahead are located with StructAfter before delegating to
 * chain_context_intersects(). */
2545 bool intersects (const hb_set_t *glyphs, ChainContextClosureLookupContext &lookup_context) const
2547 const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
2548 const Array16Of<HBUINT16> &lookahead = StructAfter<Array16Of<HBUINT16>> (input);
2549 return chain_context_intersects (glyphs,
2550 backtrack.len, backtrack.arrayZ,
2551 input.lenP1, input.arrayZ,
2552 lookahead.len, lookahead.arrayZ,
/* Closure for one rule: bails early if the recursion-depth/lookup budget is
 * exhausted, otherwise unpacks the trailing arrays and delegates. */
2556 void closure (hb_closure_context_t *c, unsigned value,
2557 ChainContextClosureLookupContext &lookup_context) const
2559 if (unlikely (c->lookup_limit_exceeded ())) return;
2561 const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
2562 const Array16Of<HBUINT16> &lookahead = StructAfter<Array16Of<HBUINT16>> (input);
2563 const Array16Of<LookupRecord> &lookup = StructAfter<Array16Of<LookupRecord>> (lookahead);
2564 chain_context_closure_lookup (c,
2565 backtrack.len, backtrack.arrayZ,
2566 input.lenP1, input.arrayZ,
2567 lookahead.len, lookahead.arrayZ,
2568 lookup.len, lookup.arrayZ,
/* Marks this rule's referenced lookups reachable, but only when the rule can
 * match within c->glyphs and the lookup budget is not exhausted. */
2573 void closure_lookups (hb_closure_lookups_context_t *c,
2574 ChainContextClosureLookupContext &lookup_context) const
2576 if (unlikely (c->lookup_limit_exceeded ())) return;
2577 if (!intersects (c->glyphs, lookup_context)) return;
2579 const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
2580 const Array16Of<HBUINT16> &lookahead = StructAfter<Array16Of<HBUINT16>> (input);
2581 const Array16Of<LookupRecord> &lookup = StructAfter<Array16Of<LookupRecord>> (lookahead);
2582 recurse_lookups (c, lookup.len, lookup.arrayZ);
/* Unpacks the rule's trailing arrays and forwards to the shared
 * chain_context_collect_glyphs_lookup() driver. */
2585 void collect_glyphs (hb_collect_glyphs_context_t *c,
2586 ChainContextCollectGlyphsLookupContext &lookup_context) const
2588 const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
2589 const Array16Of<HBUINT16> &lookahead = StructAfter<Array16Of<HBUINT16>> (input);
2590 const Array16Of<LookupRecord> &lookup = StructAfter<Array16Of<LookupRecord>> (lookahead);
2591 chain_context_collect_glyphs_lookup (c,
2592 backtrack.len, backtrack.arrayZ,
2593 input.lenP1, input.arrayZ,
2594 lookahead.len, lookahead.arrayZ,
2595 lookup.len, lookup.arrayZ,
/* Thin adapter: unpack trailing arrays, delegate to
 * chain_context_would_apply_lookup(). */
2599 bool would_apply (hb_would_apply_context_t *c,
2600 ChainContextApplyLookupContext &lookup_context) const
2602 const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
2603 const Array16Of<HBUINT16> &lookahead = StructAfter<Array16Of<HBUINT16>> (input);
2604 const Array16Of<LookupRecord> &lookup = StructAfter<Array16Of<LookupRecord>> (lookahead);
2605 return chain_context_would_apply_lookup (c,
2606 backtrack.len, backtrack.arrayZ,
2607 input.lenP1, input.arrayZ,
2608 lookahead.len, lookahead.arrayZ, lookup.len,
2609 lookup.arrayZ, lookup_context);
/* Thin adapter: unpack trailing arrays, delegate to
 * chain_context_apply_lookup(). */
2612 bool apply (hb_ot_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
2615 const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
2616 const Array16Of<HBUINT16> &lookahead = StructAfter<Array16Of<HBUINT16>> (input);
2617 const Array16Of<LookupRecord> &lookup = StructAfter<Array16Of<LookupRecord>> (lookahead);
2618 return_trace (chain_context_apply_lookup (c,
2619 backtrack.len, backtrack.arrayZ,
2620 input.lenP1, input.arrayZ,
2621 lookahead.len, lookahead.arrayZ, lookup.len,
2622 lookup.arrayZ, lookup_context));
/* Serialization helper: writes each value of `it` as an HBUINT16. The length
 * prefix is handled by elided lines in this chunk — confirm in full source. */
2625 template<typename Iterator,
2626 hb_requires (hb_is_iterator (Iterator))>
2627 void serialize_array (hb_serialize_context_t *c,
2632 for (const auto g : it)
2633 c->copy ((HBUINT16) g);
/* Serializes this rule with values remapped: backtrack_map is the default for
 * all three sequences; input_map/lookahead_map override it when the class
 * spaces differ (Format 2 uses separate ClassDefs per sequence). Lookup
 * indices are remapped through lookup_map, and the final count is assigned
 * with overflow checking. */
2636 bool serialize (hb_serialize_context_t *c,
2637 const hb_map_t *lookup_map,
2638 const hb_map_t *backtrack_map,
2639 const hb_map_t *input_map = nullptr,
2640 const hb_map_t *lookahead_map = nullptr) const
2642 TRACE_SERIALIZE (this);
2643 auto *out = c->start_embed (this);
2644 if (unlikely (!out)) return_trace (false);
2646 const hb_map_t *mapping = backtrack_map;
2647 serialize_array (c, backtrack.len, + backtrack.iter ()
2648 | hb_map (mapping));
2650 const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
2651 if (input_map) mapping = input_map;
2652 serialize_array (c, input.lenP1, + input.iter ()
2653 | hb_map (mapping));
2655 const Array16Of<HBUINT16> &lookahead = StructAfter<Array16Of<HBUINT16>> (input);
2656 if (lookahead_map) mapping = lookahead_map;
2657 serialize_array (c, lookahead.len, + lookahead.iter ()
2658 | hb_map (mapping));
2660 const Array16Of<LookupRecord> &lookupRecord = StructAfter<Array16Of<LookupRecord>> (lookahead);
/* Embed the count field now; patch it after we know how many records survive. */
2662 HBUINT16* lookupCount = c->embed (&(lookupRecord.len));
2663 if (!lookupCount) return_trace (false);
2665 unsigned count = serialize_lookuprecord_array (c, lookupRecord.as_array (), lookup_map);
2666 return_trace (c->check_assign (*lookupCount, count, HB_SERIALIZE_ERROR_INT_OVERFLOW));
/* Subsets one rule. Two modes (branch structure partly elided in this chunk):
 * with no class maps (Format 1/3), all sequence values must survive in the
 * retained glyphset and are remapped through glyph_map; with class maps
 * (Format 2), every class value must survive in its respective klass map. */
2669 bool subset (hb_subset_context_t *c,
2670 const hb_map_t *lookup_map,
2671 const hb_map_t *backtrack_map = nullptr,
2672 const hb_map_t *input_map = nullptr,
2673 const hb_map_t *lookahead_map = nullptr) const
2675 TRACE_SUBSET (this);
2677 const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
2678 const Array16Of<HBUINT16> &lookahead = StructAfter<Array16Of<HBUINT16>> (input);
2682 const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
2683 if (!hb_all (backtrack, glyphset) ||
2684 !hb_all (input, glyphset) ||
2685 !hb_all (lookahead, glyphset))
2686 return_trace (false);
2688 serialize (c->serializer, lookup_map, c->plan->glyph_map);
2692 if (!hb_all (backtrack, backtrack_map) ||
2693 !hb_all (input, input_map) ||
2694 !hb_all (lookahead, lookahead_map))
2695 return_trace (false);
2697 serialize (c->serializer, lookup_map, backtrack_map, input_map, lookahead_map);
2700 return_trace (true);
/* Sanitizes the four back-to-back arrays in order; each StructAfter is only
 * valid once the preceding array's length has been verified. */
2703 bool sanitize (hb_sanitize_context_t *c) const
2705 TRACE_SANITIZE (this);
2706 if (!backtrack.sanitize (c)) return_trace (false);
2707 const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
2708 if (!input.sanitize (c)) return_trace (false);
2709 const Array16Of<HBUINT16> &lookahead = StructAfter<Array16Of<HBUINT16>> (input);
2710 if (!lookahead.sanitize (c)) return_trace (false);
2711 const Array16Of<LookupRecord> &lookup = StructAfter<Array16Of<LookupRecord>> (lookahead);
2712 return_trace (lookup.sanitize (c));
/* Binary layout: only `backtrack` is a named member; the X-suffixed arrays
 * are commented out because their offsets are variable — they are reached at
 * runtime via StructAfter chains. */
2717 backtrack; /* Array of backtracking values
2718 * (to be matched before the input
2720 HeadlessArrayOf<HBUINT16>
2721 inputX; /* Array of input values (start with
2724 lookaheadX; /* Array of lookahead values's (to be
2725 * matched after the input sequence) */
2726 Array16Of<LookupRecord>
2727 lookupX; /* Array of LookupRecords--in
2730 DEFINE_SIZE_MIN (8);
/* Any-of over the set's rules (iterator chain partly elided in this chunk). */
2735 bool intersects (const hb_set_t *glyphs, ChainContextClosureLookupContext &lookup_context) const
2739 | hb_map (hb_add (this))
2740 | hb_map ([&] (const ChainRule &_) { return _.intersects (glyphs, lookup_context); })
/* Runs closure on every rule in the set, respecting the lookup budget. */
2744 void closure (hb_closure_context_t *c, unsigned value, ChainContextClosureLookupContext &lookup_context) const
2746 if (unlikely (c->lookup_limit_exceeded ())) return;
2750 | hb_map (hb_add (this))
2751 | hb_apply ([&] (const ChainRule &_) { _.closure (c, value, lookup_context); })
/* Marks reachable lookups for every rule in the set. */
2755 void closure_lookups (hb_closure_lookups_context_t *c,
2756 ChainContextClosureLookupContext &lookup_context) const
2758 if (unlikely (c->lookup_limit_exceeded ())) return;
2761 | hb_map (hb_add (this))
2762 | hb_apply ([&] (const ChainRule &_) { _.closure_lookups (c, lookup_context); })
/* Collects glyphs from every rule in the set. */
2766 void collect_glyphs (hb_collect_glyphs_context_t *c, ChainContextCollectGlyphsLookupContext &lookup_context) const
2770 | hb_map (hb_add (this))
2771 | hb_apply ([&] (const ChainRule &_) { _.collect_glyphs (c, lookup_context); })
/* True if any rule in the set would apply (rules are in preference order). */
2775 bool would_apply (hb_would_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
2779 | hb_map (hb_add (this))
2780 | hb_map ([&] (const ChainRule &_) { return _.would_apply (c, lookup_context); })
/* Applies the first rule that matches; rules are tried in table order. */
2785 bool apply (hb_ot_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
2790 | hb_map (hb_add (this))
2791 | hb_map ([&] (const ChainRule &_) { return _.apply (c, lookup_context); })
/* Subsets each rule individually; a rule whose subset fails is reverted
 * (o_snap) and dropped rather than failing the whole set. If no rule survives
 * the entire set is reverted (snap) and reported empty. */
2797 bool subset (hb_subset_context_t *c,
2798 const hb_map_t *lookup_map,
2799 const hb_map_t *backtrack_klass_map = nullptr,
2800 const hb_map_t *input_klass_map = nullptr,
2801 const hb_map_t *lookahead_klass_map = nullptr) const
2803 TRACE_SUBSET (this);
2805 auto snap = c->serializer->snapshot ();
2806 auto *out = c->serializer->start_embed (*this);
2807 if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
2809 for (const Offset16To<ChainRule>& _ : rule)
2812 auto o_snap = c->serializer->snapshot ();
2813 auto *o = out->rule.serialize_append (c->serializer);
2814 if (unlikely (!o)) continue;
2816 if (!o->serialize_subset (c, _, this,
2818 backtrack_klass_map,
2820 lookahead_klass_map))
2823 c->serializer->revert (o_snap);
2827 bool ret = bool (out->rule);
2828 if (!ret) c->serializer->revert (snap);
/* Delegates to the offset array's sanitizer, which validates each ChainRule. */
2833 bool sanitize (hb_sanitize_context_t *c) const
2835 TRACE_SANITIZE (this);
2836 return_trace (rule.sanitize (c, this));
/* Binary layout: a single offset array of rules. */
2840 Array16OfOffset16To<ChainRule>
2841 rule; /* Array of ChainRule tables
2842 * ordered by preference */
2844 DEFINE_SIZE_ARRAY (2, rule);
/* Chained Sequence Context Format 1: rules keyed by the first glyph via a
 * Coverage table; sequence values are plain glyph ids (SimpleContext, so all
 * per-sequence data pointers are null).
 * NOTE(review): interior lines are elided throughout this chunk — confirm
 * exact control flow against the full source. */
2847 struct ChainContextFormat1
/* Any rule set whose first glyph is in `glyphs` and which intersects wins. */
2849 bool intersects (const hb_set_t *glyphs) const
2851 struct ChainContextClosureLookupContext lookup_context = {
2852 {intersects_glyph, intersected_glyph},
2853 ContextFormat::SimpleContext,
2854 {nullptr, nullptr, nullptr}
2858 + hb_zip (this+coverage, ruleSet)
2859 | hb_filter (*glyphs, hb_first)
2860 | hb_map (hb_second)
2861 | hb_map (hb_add (this))
2862 | hb_map ([&] (const ChainRuleSet &_) { return _.intersects (glyphs, lookup_context); })
2867 bool may_have_non_1to1 () const
/* Closure: narrows the active glyph set to this table's coverage, then closes
 * over each rule set whose first glyph is currently active. */
2870 void closure (hb_closure_context_t *c) const
2872 hb_set_t* cur_active_glyphs = &c->push_cur_active_glyphs ();
2873 get_coverage ().intersected_coverage_glyphs (&c->previous_parent_active_glyphs (),
2876 struct ChainContextClosureLookupContext lookup_context = {
2877 {intersects_glyph, intersected_glyph},
2878 ContextFormat::SimpleContext,
2879 {nullptr, nullptr, nullptr}
2882 + hb_zip (this+coverage, hb_range ((unsigned) ruleSet.len))
2883 | hb_filter ([&] (hb_codepoint_t _) {
2884 return c->previous_parent_active_glyphs ().has (_);
2886 | hb_map ([&](const hb_pair_t<hb_codepoint_t, unsigned> _) { return hb_pair_t<unsigned, const ChainRuleSet&> (_.first, this+ruleSet[_.second]); })
2887 | hb_apply ([&] (const hb_pair_t<unsigned, const ChainRuleSet&>& _) { _.second.closure (c, _.first, lookup_context); })
2890 c->pop_cur_done_glyphs ();
/* Marks lookups reachable from rule sets whose first glyph survives. */
2893 void closure_lookups (hb_closure_lookups_context_t *c) const
2895 struct ChainContextClosureLookupContext lookup_context = {
2896 {intersects_glyph, intersected_glyph},
2897 ContextFormat::SimpleContext,
2898 {nullptr, nullptr, nullptr}
2901 + hb_zip (this+coverage, ruleSet)
2902 | hb_filter (*c->glyphs, hb_first)
2903 | hb_map (hb_second)
2904 | hb_map (hb_add (this))
2905 | hb_apply ([&] (const ChainRuleSet &_) { _.closure_lookups (c, lookup_context); })
2909 void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {}
/* Collects first glyphs from coverage, then all sequence glyphs per rule set. */
2911 void collect_glyphs (hb_collect_glyphs_context_t *c) const
2913 (this+coverage).collect_coverage (c->input);
2915 struct ChainContextCollectGlyphsLookupContext lookup_context = {
2917 {nullptr, nullptr, nullptr}
2921 | hb_map (hb_add (this))
2922 | hb_apply ([&] (const ChainRuleSet &_) { _.collect_glyphs (c, lookup_context); })
/* Planner query for the first glyph's rule set. */
2926 bool would_apply (hb_would_apply_context_t *c) const
2928 const ChainRuleSet &rule_set = this+ruleSet[(this+coverage).get_coverage (c->glyphs[0])];
2929 struct ChainContextApplyLookupContext lookup_context = {
2931 {nullptr, nullptr, nullptr}
2933 return rule_set.would_apply (c, lookup_context);
2936 const Coverage &get_coverage () const { return this+coverage; }
/* Runtime: `likely (NOT_COVERED)` is intentional — the miss is the hot path. */
2938 bool apply (hb_ot_apply_context_t *c) const
2941 unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
2942 if (likely (index == NOT_COVERED)) return_trace (false);
2944 const ChainRuleSet &rule_set = this+ruleSet[index];
2945 struct ChainContextApplyLookupContext lookup_context = {
2947 {nullptr, nullptr, nullptr}
2949 return_trace (rule_set.apply (c, lookup_context));
/* Subsetter: keeps only coverage glyphs that survive AND whose rule set
 * subsets non-empty; rebuilds Coverage from the remapped survivors. */
2952 bool subset (hb_subset_context_t *c) const
2954 TRACE_SUBSET (this);
2955 const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
2956 const hb_map_t &glyph_map = *c->plan->glyph_map;
2958 auto *out = c->serializer->start_embed (*this);
2959 if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
2960 out->format = format;
2962 const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? c->plan->gsub_lookups : c->plan->gpos_lookups;
2963 hb_sorted_vector_t<hb_codepoint_t> new_coverage;
2964 + hb_zip (this+coverage, ruleSet)
2965 | hb_filter (glyphset, hb_first)
2966 | hb_filter (subset_offset_array (c, out->ruleSet, this, lookup_map), hb_second)
2968 | hb_map (glyph_map)
2969 | hb_sink (new_coverage)
2972 out->coverage.serialize_serialize (c->serializer, new_coverage.iter ());
2973 return_trace (bool (new_coverage));
2976 bool sanitize (hb_sanitize_context_t *c) const
2978 TRACE_SANITIZE (this);
2979 return_trace (coverage.sanitize (c, this) && ruleSet.sanitize (c, this));
/* Binary layout (ruleSet parallel to Coverage order). */
2983 HBUINT16 format; /* Format identifier--format = 1 */
2984 Offset16To<Coverage>
2985 coverage; /* Offset to Coverage table--from
2986 * beginning of table */
2987 Array16OfOffset16To<ChainRuleSet>
2988 ruleSet; /* Array of ChainRuleSet tables
2989 * ordered by Coverage Index */
2991 DEFINE_SIZE_ARRAY (6, ruleSet);
/* Chained Sequence Context Format 2: rules keyed by glyph class. Three
 * separate ClassDefs (backtrack/input/lookahead) give each sequence its own
 * class space; ruleSet is indexed by input class of the first glyph.
 * NOTE(review): interior lines are elided throughout this chunk — confirm
 * exact control flow against the full source. */
2994 struct ChainContextFormat2
/* Intersection must also verify the rule's first class is represented among
 * the retained coverage glyphs (coverage_glyph_classes), not just reachable
 * through the input ClassDef. */
2996 bool intersects (const hb_set_t *glyphs) const
2998 if (!(this+coverage).intersects (glyphs))
3001 const ClassDef &backtrack_class_def = this+backtrackClassDef;
3002 const ClassDef &input_class_def = this+inputClassDef;
3003 const ClassDef &lookahead_class_def = this+lookaheadClassDef;
3005 struct ChainContextClosureLookupContext lookup_context = {
3006 {intersects_class, intersected_class_glyphs},
3007 ContextFormat::ClassBasedContext,
3008 {&backtrack_class_def,
3010 &lookahead_class_def}
3013 hb_set_t retained_coverage_glyphs;
3014 (this+coverage).intersected_coverage_glyphs (glyphs, &retained_coverage_glyphs);
3016 hb_set_t coverage_glyph_classes;
3017 input_class_def.intersected_classes (&retained_coverage_glyphs, &coverage_glyph_classes);
3021 | hb_map (hb_add (this))
3023 | hb_map ([&] (const hb_pair_t<unsigned, const ChainRuleSet &> p)
3024 { return input_class_def.intersects_class (glyphs, p.first) &&
3025 coverage_glyph_classes.has (p.first) &&
3026 p.second.intersects (glyphs, lookup_context); })
3031 bool may_have_non_1to1 () const
/* Closure over rule sets whose input class intersects the active glyphs. */
3034 void closure (hb_closure_context_t *c) const
3036 if (!(this+coverage).intersects (c->glyphs))
3039 hb_set_t* cur_active_glyphs = &c->push_cur_active_glyphs ();
3040 get_coverage ().intersected_coverage_glyphs (&c->previous_parent_active_glyphs (),
3044 const ClassDef &backtrack_class_def = this+backtrackClassDef;
3045 const ClassDef &input_class_def = this+inputClassDef;
3046 const ClassDef &lookahead_class_def = this+lookaheadClassDef;
3048 struct ChainContextClosureLookupContext lookup_context = {
3049 {intersects_class, intersected_class_glyphs},
3050 ContextFormat::ClassBasedContext,
3051 {&backtrack_class_def,
3053 &lookahead_class_def}
3056 + hb_enumerate (ruleSet)
3057 | hb_filter ([&] (unsigned _)
3058 { return input_class_def.intersects_class (&c->parent_active_glyphs (), _); },
3060 | hb_apply ([&] (const hb_pair_t<unsigned, const Offset16To<ChainRuleSet>&> _)
3062 const ChainRuleSet& chainrule_set = this+_.second;
3063 chainrule_set.closure (c, _.first, lookup_context);
3067 c->pop_cur_done_glyphs ();
/* Marks reachable lookups for rule sets whose input class intersects. */
3070 void closure_lookups (hb_closure_lookups_context_t *c) const
3072 if (!(this+coverage).intersects (c->glyphs))
3075 const ClassDef &backtrack_class_def = this+backtrackClassDef;
3076 const ClassDef &input_class_def = this+inputClassDef;
3077 const ClassDef &lookahead_class_def = this+lookaheadClassDef;
3079 struct ChainContextClosureLookupContext lookup_context = {
3080 {intersects_class, intersected_class_glyphs},
3081 ContextFormat::ClassBasedContext,
3082 {&backtrack_class_def,
3084 &lookahead_class_def}
3088 | hb_map (hb_add (this))
3090 | hb_filter([&] (unsigned klass)
3091 { return input_class_def.intersects_class (c->glyphs, klass); }, hb_first)
3092 | hb_map (hb_second)
3093 | hb_apply ([&] (const ChainRuleSet &_)
3094 { _.closure_lookups (c, lookup_context); })
3098 void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {}
/* Collects coverage glyphs, then glyphs for each class value per rule set. */
3100 void collect_glyphs (hb_collect_glyphs_context_t *c) const
3102 (this+coverage).collect_coverage (c->input);
3104 const ClassDef &backtrack_class_def = this+backtrackClassDef;
3105 const ClassDef &input_class_def = this+inputClassDef;
3106 const ClassDef &lookahead_class_def = this+lookaheadClassDef;
3108 struct ChainContextCollectGlyphsLookupContext lookup_context = {
3110 {&backtrack_class_def,
3112 &lookahead_class_def}
3116 | hb_map (hb_add (this))
3117 | hb_apply ([&] (const ChainRuleSet &_) { _.collect_glyphs (c, lookup_context); })
/* Planner query: selects the rule set by input class of the first glyph. */
3121 bool would_apply (hb_would_apply_context_t *c) const
3123 const ClassDef &backtrack_class_def = this+backtrackClassDef;
3124 const ClassDef &input_class_def = this+inputClassDef;
3125 const ClassDef &lookahead_class_def = this+lookaheadClassDef;
3127 unsigned int index = input_class_def.get_class (c->glyphs[0]);
3128 const ChainRuleSet &rule_set = this+ruleSet[index];
3129 struct ChainContextApplyLookupContext lookup_context = {
3131 {&backtrack_class_def,
3133 &lookahead_class_def}
3135 return rule_set.would_apply (c, lookup_context);
3138 const Coverage &get_coverage () const { return this+coverage; }
/* Runtime: coverage gates entry, then the rule set is picked by input class.
 * `likely (NOT_COVERED)` is intentional — the miss is the hot path. */
3140 bool apply (hb_ot_apply_context_t *c) const
3143 unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
3144 if (likely (index == NOT_COVERED)) return_trace (false);
3146 const ClassDef &backtrack_class_def = this+backtrackClassDef;
3147 const ClassDef &input_class_def = this+inputClassDef;
3148 const ClassDef &lookahead_class_def = this+lookaheadClassDef;
3150 index = input_class_def.get_class (c->buffer->cur().codepoint);
3151 const ChainRuleSet &rule_set = this+ruleSet[index];
3152 struct ChainContextApplyLookupContext lookup_context = {
3154 {&backtrack_class_def,
3156 &lookahead_class_def}
3158 return_trace (rule_set.apply (c, lookup_context));
/* Subsetter: subsets the three ClassDefs (capturing old->new class maps),
 * keeps rule sets only for classes that survive both the input klass map and
 * the retained coverage classes, and prunes empty trailing rule sets by
 * reverting to the last non-empty snapshot. */
3161 bool subset (hb_subset_context_t *c) const
3163 TRACE_SUBSET (this);
3164 auto *out = c->serializer->start_embed (*this);
3165 if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
3166 out->format = format;
3167 out->coverage.serialize_subset (c, coverage, this);
3169 hb_map_t backtrack_klass_map;
3170 hb_map_t input_klass_map;
3171 hb_map_t lookahead_klass_map;
3173 out->backtrackClassDef.serialize_subset (c, backtrackClassDef, this, &backtrack_klass_map);
3174 // TODO: subset inputClassDef based on glyphs survived in Coverage subsetting
3175 out->inputClassDef.serialize_subset (c, inputClassDef, this, &input_klass_map);
3176 out->lookaheadClassDef.serialize_subset (c, lookaheadClassDef, this, &lookahead_klass_map);
3178 if (unlikely (!c->serializer->propagate_error (backtrack_klass_map,
3180 lookahead_klass_map)))
3181 return_trace (false);
3183 const hb_set_t* glyphset = c->plan->glyphset_gsub ();
3184 hb_set_t retained_coverage_glyphs;
3185 (this+coverage).intersected_coverage_glyphs (glyphset, &retained_coverage_glyphs);
3187 hb_set_t coverage_glyph_classes;
3188 (this+inputClassDef).intersected_classes (&retained_coverage_glyphs, &coverage_glyph_classes);
3190 int non_zero_index = -1, index = 0;
3192 const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? c->plan->gsub_lookups : c->plan->gpos_lookups;
3193 auto last_non_zero = c->serializer->snapshot ();
3194 for (const auto& _ : + hb_enumerate (ruleSet)
3195 | hb_filter (input_klass_map, hb_first))
3197 auto *o = out->ruleSet.serialize_append (c->serializer);
3203 if (coverage_glyph_classes.has (_.first) &&
3204 o->serialize_subset (c, _.second, this,
3206 &backtrack_klass_map,
3208 &lookahead_klass_map))
3210 last_non_zero = c->serializer->snapshot ();
3211 non_zero_index = index;
3217 if (!ret || non_zero_index == -1) return_trace (false);
3219 // prune empty trailing ruleSets
3220 if (index > non_zero_index) {
3221 c->serializer->revert (last_non_zero);
3222 out->ruleSet.len = non_zero_index + 1;
3225 return_trace (bool (out->ruleSet));
3228 bool sanitize (hb_sanitize_context_t *c) const
3230 TRACE_SANITIZE (this);
3231 return_trace (coverage.sanitize (c, this) &&
3232 backtrackClassDef.sanitize (c, this) &&
3233 inputClassDef.sanitize (c, this) &&
3234 lookaheadClassDef.sanitize (c, this) &&
3235 ruleSet.sanitize (c, this));
/* Binary layout (ruleSet indexed by input class value). */
3239 HBUINT16 format; /* Format identifier--format = 2 */
3240 Offset16To<Coverage>
3241 coverage; /* Offset to Coverage table--from
3242 * beginning of table */
3243 Offset16To<ClassDef>
3244 backtrackClassDef; /* Offset to glyph ClassDef table
3245 * containing backtrack sequence
3246 * data--from beginning of table */
3247 Offset16To<ClassDef>
3248 inputClassDef; /* Offset to glyph ClassDef
3249 * table containing input sequence
3250 * data--from beginning of table */
3251 Offset16To<ClassDef>
3252 lookaheadClassDef; /* Offset to glyph ClassDef table
3253 * containing lookahead sequence
3254 * data--from beginning of table */
3255 Array16OfOffset16To<ChainRuleSet>
3256 ruleSet; /* Array of ChainRuleSet tables
3257 * ordered by class */
3259 DEFINE_SIZE_ARRAY (12, ruleSet);
3262 struct ChainContextFormat3
/* Format 3 stores coverage-offset arrays per sequence. Note the pointer
 * arithmetic quirk: the three arrays' contents are passed as raw HBUINT16
 * pointers; for input, `(const HBUINT16 *) input.arrayZ + 1` skips the first
 * coverage (the unmatched first glyph). */
3264 bool intersects (const hb_set_t *glyphs) const
3266 const Array16OfOffset16To<Coverage> &input = StructAfter<Array16OfOffset16To<Coverage>> (backtrack);
3268 if (!(this+input[0]).intersects (glyphs))
3271 const Array16OfOffset16To<Coverage> &lookahead = StructAfter<Array16OfOffset16To<Coverage>> (input);
3272 struct ChainContextClosureLookupContext lookup_context = {
3273 {intersects_coverage, intersected_coverage_glyphs},
3274 ContextFormat::CoverageBasedContext,
3277 return chain_context_intersects (glyphs,
3278 backtrack.len, (const HBUINT16 *) backtrack.arrayZ,
3279 input.len, (const HBUINT16 *) input.arrayZ + 1,
3280 lookahead.len, (const HBUINT16 *) lookahead.arrayZ,
3284 bool may_have_non_1to1 () const
/* Closure: gated on the first input coverage; narrows the active glyph set,
 * then recurses into the single rule's lookup records. */
3287 void closure (hb_closure_context_t *c) const
3289 const Array16OfOffset16To<Coverage> &input = StructAfter<Array16OfOffset16To<Coverage>> (backtrack);
3291 if (!(this+input[0]).intersects (c->glyphs))
3294 hb_set_t* cur_active_glyphs = &c->push_cur_active_glyphs ();
3295 get_coverage ().intersected_coverage_glyphs (&c->previous_parent_active_glyphs (),
3299 const Array16OfOffset16To<Coverage> &lookahead = StructAfter<Array16OfOffset16To<Coverage>> (input);
3300 const Array16Of<LookupRecord> &lookup = StructAfter<Array16Of<LookupRecord>> (lookahead);
3301 struct ChainContextClosureLookupContext lookup_context = {
3302 {intersects_coverage, intersected_coverage_glyphs},
3303 ContextFormat::CoverageBasedContext,
3306 chain_context_closure_lookup (c,
3307 backtrack.len, (const HBUINT16 *) backtrack.arrayZ,
3308 input.len, (const HBUINT16 *) input.arrayZ + 1,
3309 lookahead.len, (const HBUINT16 *) lookahead.arrayZ,
3310 lookup.len, lookup.arrayZ,
3313 c->pop_cur_done_glyphs ();
3316 void closure_lookups (hb_closure_lookups_context_t *c) const
3318 if (!intersects (c->glyphs))
3321 const Array16OfOffset16To<Coverage> &input = StructAfter<Array16OfOffset16To<Coverage>> (backtrack);
3322 const Array16OfOffset16To<Coverage> &lookahead = StructAfter<Array16OfOffset16To<Coverage>> (input);
3323 const Array16Of<LookupRecord> &lookup = StructAfter<Array16Of<LookupRecord>> (lookahead);
3324 recurse_lookups (c, lookup.len, lookup.arrayZ);
3327 void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {}
3329 void collect_glyphs (hb_collect_glyphs_context_t *c) const
3331 const Array16OfOffset16To<Coverage> &input = StructAfter<Array16OfOffset16To<Coverage>> (backtrack);
3333 (this+input[0]).collect_coverage (c->input);
3335 const Array16OfOffset16To<Coverage> &lookahead = StructAfter<Array16OfOffset16To<Coverage>> (input);
3336 const Array16Of<LookupRecord> &lookup = StructAfter<Array16Of<LookupRecord>> (lookahead);
3337 struct ChainContextCollectGlyphsLookupContext lookup_context = {
3341 chain_context_collect_glyphs_lookup (c,
3342 backtrack.len, (const HBUINT16 *) backtrack.arrayZ,
3343 input.len, (const HBUINT16 *) input.arrayZ + 1,
3344 lookahead.len, (const HBUINT16 *) lookahead.arrayZ,
3345 lookup.len, lookup.arrayZ,
3349 bool would_apply (hb_would_apply_context_t *c) const
3351 const Array16OfOffset16To<Coverage> &input = StructAfter<Array16OfOffset16To<Coverage>> (backtrack);
3352 const Array16OfOffset16To<Coverage> &lookahead = StructAfter<Array16OfOffset16To<Coverage>> (input);
3353 const Array16Of<LookupRecord> &lookup = StructAfter<Array16Of<LookupRecord>> (lookahead);
3354 struct ChainContextApplyLookupContext lookup_context = {
3358 return chain_context_would_apply_lookup (c,
3359 backtrack.len, (const HBUINT16 *) backtrack.arrayZ,
3360 input.len, (const HBUINT16 *) input.arrayZ + 1,
3361 lookahead.len, (const HBUINT16 *) lookahead.arrayZ,
3362 lookup.len, lookup.arrayZ, lookup_context);
3365 const Coverage &get_coverage () const
3367 const Array16OfOffset16To<Coverage> &input = StructAfter<Array16OfOffset16To<Coverage>> (backtrack);
3368 return this+input[0];
3371 bool apply (hb_ot_apply_context_t *c) const
3374 const Array16OfOffset16To<Coverage> &input = StructAfter<Array16OfOffset16To<Coverage>> (backtrack);
3376 unsigned int index = (this+input[0]).get_coverage (c->buffer->cur().codepoint);
3377 if (likely (index == NOT_COVERED)) return_trace (false);
3379 const Array16OfOffset16To<Coverage> &lookahead = StructAfter<Array16OfOffset16To<Coverage>> (input);
3380 const Array16Of<LookupRecord> &lookup = StructAfter<Array16Of<LookupRecord>> (lookahead);
3381 struct ChainContextApplyLookupContext lookup_context = {
3385 return_trace (chain_context_apply_lookup (c,
3386 backtrack.len, (const HBUINT16 *) backtrack.arrayZ,
3387 input.len, (const HBUINT16 *) input.arrayZ + 1,
3388 lookahead.len, (const HBUINT16 *) lookahead.arrayZ,
3389 lookup.len, lookup.arrayZ, lookup_context));
3392 template<typename Iterator,
3393 hb_requires (hb_is_iterator (Iterator))>
3394 bool serialize_coverage_offsets (hb_subset_context_t *c, Iterator it, const void* base) const
3396 TRACE_SERIALIZE (this);
3397 auto *out = c->serializer->start_embed<Array16OfOffset16To<Coverage>> ();
3399 if (unlikely (!c->serializer->allocate_size<HBUINT16> (HBUINT16::static_size)))
3400 return_trace (false);
3402 for (auto& offset : it) {
3403 auto *o = out->serialize_append (c->serializer);
3404 if (unlikely (!o) || !o->serialize_subset (c, offset, base))
3405 return_trace (false);
3408 return_trace (true);
3411 bool subset (hb_subset_context_t *c) const
3413 TRACE_SUBSET (this);
3415 auto *out = c->serializer->start_embed (this);
3416 if (unlikely (!out)) return_trace (false);
3417 if (unlikely (!c->serializer->embed (this->format))) return_trace (false);
3419 if (!serialize_coverage_offsets (c, backtrack.iter (), this))
3420 return_trace (false);
3422 const Array16OfOffset16To<Coverage> &input = StructAfter<Array16OfOffset16To<Coverage>> (backtrack);
3423 if (!serialize_coverage_offsets (c, input.iter (), this))
3424 return_trace (false);
3426 const Array16OfOffset16To<Coverage> &lookahead = StructAfter<Array16OfOffset16To<Coverage>> (input);
3427 if (!serialize_coverage_offsets (c, lookahead.iter (), this))
3428 return_trace (false);
3430 const Array16Of<LookupRecord> &lookupRecord = StructAfter<Array16Of<LookupRecord>> (lookahead);
3431 const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? c->plan->gsub_lookups : c->plan->gpos_lookups;
3433 HBUINT16 *lookupCount = c->serializer->copy<HBUINT16> (lookupRecord.len);
3434 if (!lookupCount) return_trace (false);
3436 unsigned count = serialize_lookuprecord_array (c->serializer, lookupRecord.as_array (), lookup_map);
3437 return_trace (c->serializer->check_assign (*lookupCount, count, HB_SERIALIZE_ERROR_INT_OVERFLOW));
3440 bool sanitize (hb_sanitize_context_t *c) const
3442 TRACE_SANITIZE (this);
3443 if (!backtrack.sanitize (c, this)) return_trace (false);
3444 const Array16OfOffset16To<Coverage> &input = StructAfter<Array16OfOffset16To<Coverage>> (backtrack);
3445 if (!input.sanitize (c, this)) return_trace (false);
3446 if (!input.len) return_trace (false); /* To be consistent with Context. */
3447 const Array16OfOffset16To<Coverage> &lookahead = StructAfter<Array16OfOffset16To<Coverage>> (input);
3448 if (!lookahead.sanitize (c, this)) return_trace (false);
3449 const Array16Of<LookupRecord> &lookup = StructAfter<Array16Of<LookupRecord>> (lookahead);
3450 return_trace (lookup.sanitize (c));
3454 HBUINT16 format; /* Format identifier--format = 3 */
3455 Array16OfOffset16To<Coverage>
3456 backtrack; /* Array of coverage tables
3457 * in backtracking sequence, in glyph
3459 Array16OfOffset16To<Coverage>
3460 inputX ; /* Array of coverage
3461 * tables in input sequence, in glyph
3463 Array16OfOffset16To<Coverage>
3464 lookaheadX; /* Array of coverage tables
3465 * in lookahead sequence, in glyph
3467 Array16Of<LookupRecord>
3468 lookupX; /* Array of LookupRecords--in
3471 DEFINE_SIZE_MIN (10);
/* ChainContext: format-discriminated wrapper over the three chaining
 * context subtable layouts.  NOTE(review): the enclosing
 * `struct ChainContext` header and the `switch (u.format)` line are
 * not visible in this dump. */
3476 template <typename context_t, typename ...Ts>
3477 typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
3479 TRACE_DISPATCH (this, u.format);
/* may_dispatch() sanitizes u.format before the cases below trust it. */
3480 if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
3482 case 1: return_trace (c->dispatch (u.format1, std::forward<Ts> (ds)...));
3483 case 2: return_trace (c->dispatch (u.format2, std::forward<Ts> (ds)...));
3484 case 3: return_trace (c->dispatch (u.format3, std::forward<Ts> (ds)...));
3485 default:return_trace (c->default_return_value ());
/* Union of the three on-disk layouts, discriminated by `format`. */
3491 HBUINT16 format; /* Format identifier */
3492 ChainContextFormat1 format1;
3493 ChainContextFormat2 format2;
3494 ChainContextFormat3 format3;
/* ExtensionFormat1: indirection record that points, via a 32-bit
 * offset, to the real subtable of lookup type `extensionLookupType`.
 * This lets lookups whose data would overflow 16-bit offsets live
 * farther away in the file. */
3499 template <typename T>
3500 struct ExtensionFormat1
3502 unsigned int get_type () const { return extensionLookupType; }

/* Resolve the wrapped subtable through the 32-bit offset; the cast
 * retypes the raw Offset32 as an offset to T::SubTable. */
3504 template <typename X>
3505 const X& get_subtable () const
3506 { return this + reinterpret_cast<const Offset32To<typename T::SubTable> &> (extensionOffset); }

/* Forward dispatch to the wrapped subtable, passing the real lookup
 * type along so the subtable dispatches on it. */
3508 template <typename context_t, typename ...Ts>
3509 typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
3511 TRACE_DISPATCH (this, format);
3512 if (unlikely (!c->may_dispatch (this, this))) return_trace (c->no_dispatch_return_value ());
3513 return_trace (get_subtable<typename T::SubTable> ().dispatch (c, get_type (), std::forward<Ts> (ds)...));

3516 void collect_variation_indices (hb_collect_variation_indices_context_t *c) const

3519 /* This is called from may_dispatch() above with hb_sanitize_context_t. */
3520 bool sanitize (hb_sanitize_context_t *c) const
3522 TRACE_SANITIZE (this);
/* An Extension wrapping another Extension is invalid; rejecting it
 * here also prevents unbounded dispatch recursion. */
3523 return_trace (c->check_struct (this) &&
3524 extensionLookupType != T::SubTable::Extension);

/* Subset: copy the two header fields verbatim, then subset the
 * wrapped subtable through the rewritten 32-bit offset. */
3527 bool subset (hb_subset_context_t *c) const
3529 TRACE_SUBSET (this);
3531 auto *out = c->serializer->start_embed (this);
3532 if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);
3534 out->format = format;
3535 out->extensionLookupType = extensionLookupType;
3537 const auto& src_offset =
3538 reinterpret_cast<const Offset32To<typename T::SubTable> &> (extensionOffset);
3540 reinterpret_cast<Offset32To<typename T::SubTable> &> (out->extensionOffset);
3542 return_trace (dest_offset.serialize_subset (c, src_offset, this, get_type ()));

3546 HBUINT16 format; /* Format identifier. Set to 1. */
3547 HBUINT16 extensionLookupType; /* Lookup type of subtable referenced
3548 * by ExtensionOffset (i.e. the
3549 * extension subtable). */
3550 Offset32 extensionOffset; /* Offset to the extension subtable,
3551 * of lookup type subtable. */
3553 DEFINE_SIZE_STATIC (8);
/* Extension<T>: format-dispatching wrapper for extension subtables.
 * NOTE(review): the enclosing struct header and the switch lines are
 * not visible in this dump; only format 1 exists. */
3556 template <typename T>
3559 unsigned int get_type () const
3562 case 1: return u.format1.get_type ();
/* Typed access to the wrapped subtable; Null object on bad format. */
3566 template <typename X>
3567 const X& get_subtable () const
3570 case 1: return u.format1.template get_subtable<typename T::SubTable> ();
3571 default:return Null (typename T::SubTable);
3575 // Specialization of dispatch for subset. dispatch() normally just
3576 // dispatches to the sub table this points to, but for subset
3577 // we need to run subset on this subtable too.
3578 template <typename ...Ts>
3579 typename hb_subset_context_t::return_t dispatch (hb_subset_context_t *c, Ts&&... ds) const
3582 case 1: return u.format1.subset (c);
3583 default: return c->default_return_value ();
/* Generic dispatch: sanitize the format field, then forward to the
 * format-1 record (which forwards to the wrapped subtable). */
3587 template <typename context_t, typename ...Ts>
3588 typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
3590 TRACE_DISPATCH (this, u.format);
3591 if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
3593 case 1: return_trace (u.format1.dispatch (c, std::forward<Ts> (ds)...));
3594 default:return_trace (c->default_return_value ());
3600 HBUINT16 format; /* Format identifier */
3601 ExtensionFormat1<T> format1;
3610 struct hb_ot_layout_lookup_accelerator_t
/* Per-lookup acceleration: a glyph-set digest for fast rejection plus
 * a flattened array of the lookup's subtables. */
3612 template <typename TLookup>
3613 void init (const TLookup &lookup)
/* Fold all subtables' coverage into the digest — an approximate
 * membership test (may_have() can false-positive, never miss). */
3616 lookup.collect_coverage (&digest);
/* Flatten the lookup's subtables into `subtables` for linear apply. */
3619 OT::hb_get_subtables_context_t c_get_subtables (subtables);
3620 lookup.dispatch (&c_get_subtables);
3622 void fini () { subtables.fini (); }

/* Quick filter: false guarantees no subtable can match glyph g. */
3624 bool may_have (hb_codepoint_t g) const
3625 { return digest.may_have (g); }

/* Try each subtable in order; the first one that applies wins. */
3627 bool apply (hb_ot_apply_context_t *c) const
3629 for (unsigned int i = 0; i < subtables.length; i++)
3630 if (subtables[i].apply (c))
3636 hb_set_digest_t digest;
3637 hb_get_subtables_context_t::array_t subtables;
/* GSUBGPOS common-header accessors (the enclosing `struct GSUBGPOS`
 * header is outside this view).  The script, feature and lookup lists
 * are reached through the `this+offset` idiom. */
3642 bool has_data () const { return version.to_int (); }
3643 unsigned int get_script_count () const
3644 { return (this+scriptList).len; }
3645 const Tag& get_script_tag (unsigned int i) const
3646 { return (this+scriptList).get_tag (i); }
3647 unsigned int get_script_tags (unsigned int start_offset,
3648 unsigned int *script_count /* IN/OUT */,
3649 hb_tag_t *script_tags /* OUT */) const
3650 { return (this+scriptList).get_tags (start_offset, script_count, script_tags); }
3651 const Script& get_script (unsigned int i) const
3652 { return (this+scriptList)[i]; }
3653 bool find_script_index (hb_tag_t tag, unsigned int *index) const
3654 { return (this+scriptList).find_index (tag, index); }
3656 unsigned int get_feature_count () const
3657 { return (this+featureList).len; }
/* NOT_FOUND_INDEX maps to HB_TAG_NONE instead of indexing the list. */
3658 hb_tag_t get_feature_tag (unsigned int i) const
3659 { return i == Index::NOT_FOUND_INDEX ? HB_TAG_NONE : (this+featureList).get_tag (i); }
3660 unsigned int get_feature_tags (unsigned int start_offset,
3661 unsigned int *feature_count /* IN/OUT */,
3662 hb_tag_t *feature_tags /* OUT */) const
3663 { return (this+featureList).get_tags (start_offset, feature_count, feature_tags); }
3664 const Feature& get_feature (unsigned int i) const
3665 { return (this+featureList)[i]; }
3666 bool find_feature_index (hb_tag_t tag, unsigned int *index) const
3667 { return (this+featureList).find_index (tag, index); }
3669 unsigned int get_lookup_count () const
3670 { return (this+lookupList).len; }
3671 const Lookup& get_lookup (unsigned int i) const
3672 { return (this+lookupList)[i]; }
/* Find the FeatureVariations record matching the given normalized
 * variation coordinates.  featureVars exists only from table version
 * 1.1 on; older tables fall through to the Null object. */
3674 bool find_variations_index (const int *coords, unsigned int num_coords,
3675 unsigned int *index) const
3678 *index = FeatureVariations::NOT_FOUND_INDEX;
3681 return (version.to_int () >= 0x00010001u ? this+featureVars : Null (FeatureVariations))
3682 .find_index (coords, num_coords, index);

/* Return the variation-substituted Feature for this (feature,
 * variations) pair if one exists; otherwise the base feature. */
3684 const Feature& get_feature_variation (unsigned int feature_index,
3685 unsigned int variations_index) const
3688 if (FeatureVariations::NOT_FOUND_INDEX != variations_index &&
3689 version.to_int () >= 0x00010001u)
3691 const Feature *feature = (this+featureVars).find_substitute (variations_index,
3697 return get_feature (feature_index);

/* Accumulate lookup indices referenced by feature-variation records
 * for the given features (no-op on pre-1.1 tables). */
3700 void feature_variation_collect_lookups (const hb_set_t *feature_indexes,
3701 hb_set_t *lookup_indexes /* OUT */) const
3704 if (version.to_int () >= 0x00010001u)
3705 (this+featureVars).collect_lookups (feature_indexes, lookup_indexes);
/* Transitively close `lookup_indexes` over nested lookups reachable
 * from each listed lookup, then drop the ones found inactive for the
 * given glyph set. */
3709 template <typename TLookup>
3710 void closure_lookups (hb_face_t *face,
3711 const hb_set_t *glyphs,
3712 hb_set_t *lookup_indexes /* IN/OUT */) const
3714 hb_set_t visited_lookups, inactive_lookups;
3715 OT::hb_closure_lookups_context_t c (face, glyphs, &visited_lookups, &inactive_lookups);
/* Cast picks the concrete (GSUB vs GPOS) lookup type for recursion. */
3717 for (unsigned lookup_index : + hb_iter (lookup_indexes))
3718 reinterpret_cast<const TLookup &> (get_lookup (lookup_index)).closure_lookups (&c, lookup_index);
3720 hb_set_union (lookup_indexes, &visited_lookups);
3721 hb_set_subtract (lookup_indexes, &inactive_lookups);

/* Subsetting: prune the language systems of every script, recording
 * surviving feature indices into `new_feature_indexes`. */
3724 void prune_langsys (const hb_map_t *duplicate_feature_map,
3725 hb_hashmap_t<unsigned, hb_set_t *> *script_langsys_map,
3726 hb_set_t *new_feature_indexes /* OUT */) const
3728 hb_prune_langsys_context_t c (this, script_langsys_map, duplicate_feature_map, new_feature_indexes);
3730 unsigned count = get_script_count ();
3731 for (unsigned script_index = 0; script_index < count; script_index++)
3733 const Script& s = get_script (script_index);
3734 s.prune_langsys (&c, script_index);
/* Subset the whole GSUB/GPOS header: lookup list, feature list,
 * script list, and (version 1.1) feature variations. */
3738 template <typename TLookup>
3739 bool subset (hb_subset_layout_context_t *c) const
3741 TRACE_SUBSET (this);
3742 auto *out = c->subset_context->serializer->embed (*this);
3743 if (unlikely (!out)) return_trace (false);
3745 typedef LookupOffsetList<TLookup> TLookupList;
/* The casts retype the raw offsets as offsets to the concrete
 * (GSUB vs GPOS) list wrappers, so serialize_subset recurses with
 * the right element type. */
3746 reinterpret_cast<Offset16To<TLookupList> &> (out->lookupList)
3747 .serialize_subset (c->subset_context,
3748 reinterpret_cast<const Offset16To<TLookupList> &> (lookupList),
3752 reinterpret_cast<Offset16To<RecordListOfFeature> &> (out->featureList)
3753 .serialize_subset (c->subset_context,
3754 reinterpret_cast<const Offset16To<RecordListOfFeature> &> (featureList),
3758 out->scriptList.serialize_subset (c->subset_context,
3764 if (version.to_int () >= 0x00010001u)
3766 bool ret = out->featureVars.serialize_subset (c->subset_context, featureVars, this, c);
/* NOTE(review): the surrounding condition is missing from this dump;
 * presumably when featureVars subsets to nothing the header version
 * is downgraded to 1.0 so the absent offset is never read — confirm. */
3769 out->version.major = 1;
3770 out->version.minor = 0;
3775 return_trace (true);
/* Map each surviving feature index to a canonical representative:
 * features sharing a tag AND an identical lookup list (after
 * filtering to retained lookups) are duplicates; each maps to the
 * first such feature encountered, others map to themselves. */
3778 void find_duplicate_features (const hb_map_t *lookup_indices,
3779 const hb_set_t *feature_indices,
3780 hb_map_t *duplicate_feature_map /* OUT */) const
3782 if (feature_indices->is_empty ()) return;
/* tag -> set of representative feature indices with that tag */
3783 hb_hashmap_t<hb_tag_t, hb_set_t *> unique_features;
3784 //find out duplicate features after subset
3785 for (unsigned i : feature_indices->iter ())
3787 hb_tag_t t = get_feature_tag (i);
3788 if (t == HB_MAP_VALUE_INVALID) continue;
3789 if (!unique_features.has (t))
3791 hb_set_t* indices = hb_set_create ();
/* On allocation failure, destroy every set created so far and bail. */
3792 if (unlikely (indices == hb_set_get_empty () ||
3793 !unique_features.set (t, indices)))
3795 hb_set_destroy (indices);
3796 for (auto _ : unique_features.iter ())
3797 hb_set_destroy (_.second);
/* First feature seen with this tag: it is its own representative. */
3800 if (unique_features.get (t))
3801 unique_features.get (t)->add (i);
3802 duplicate_feature_map->set (i, i);
3808 hb_set_t* same_tag_features = unique_features.get (t);
3809 for (unsigned other_f_index : same_tag_features->iter ())
3811 const Feature& f = get_feature (i);
3812 const Feature& other_f = get_feature (other_f_index);
/* Compare only lookup indices that survive the subset plan. */
3815 + hb_iter (f.lookupIndex)
3816 | hb_filter (lookup_indices)
3820 + hb_iter (other_f.lookupIndex)
3821 | hb_filter (lookup_indices)
3824 bool is_equal = true;
3825 for (; f_iter && other_f_iter; f_iter++, other_f_iter++)
3827 unsigned a = *f_iter;
3828 unsigned b = *other_f_iter;
3829 if (a != b) { is_equal = false; break; }
/* A leftover iterator means unequal lengths: not duplicates either. */
3832 if (is_equal == false || f_iter || other_f_iter) continue;
3835 duplicate_feature_map->set (i, other_f_index);
/* No match found: record i as a new representative for this tag. */
3841 same_tag_features->add (i);
3842 duplicate_feature_map->set (i, i);
/* Normal exit: release the per-tag representative sets. */
3846 for (auto _ : unique_features.iter ())
3847 hb_set_destroy (_.second);
/* Remove feature indices that reference no surviving lookup, while
 * keeping features that must survive regardless: 'pref', 'size' with
 * FeatureParams, and features with intersecting variation alternates. */
3850 void prune_features (const hb_map_t *lookup_indices, /* IN */
3851 hb_set_t *feature_indices /* IN/OUT */) const
3854 // This is the set of feature indices which have alternate versions defined
3855 // if the FeatureVariation's table and the alternate version(s) intersect the
3856 // set of lookup indices.
3857 hb_set_t alternate_feature_indices;
3858 if (version.to_int () >= 0x00010001u)
3859 (this+featureVars).closure_features (lookup_indices, &alternate_feature_indices);
/* Propagate allocation failure to the caller's set. */
3860 if (unlikely (alternate_feature_indices.in_error()))
3862 feature_indices->err ();
3867 for (unsigned i : feature_indices->iter())
3869 const Feature& f = get_feature (i);
3870 hb_tag_t tag = get_feature_tag (i);
3871 if (tag == HB_TAG ('p', 'r', 'e', 'f'))
3872 // Note: Never ever drop feature 'pref', even if it's empty.
3873 // HarfBuzz chooses shaper for Khmer based on presence of this
3874 // feature. See thread at:
3875 // http://lists.freedesktop.org/archives/harfbuzz/2012-November/002660.html
/* 'size' carries its data in FeatureParams, not in lookups. */
3879 if (!f.featureParams.is_null () &&
3880 tag == HB_TAG ('s', 'i', 'z', 'e'))
3883 if (!f.intersects_lookup_indexes (lookup_indices)
3885 && !alternate_feature_indices.has (i)
3888 feature_indices->del (i);

/* Header size: fixed minimum plus the optional featureVars offset
 * present only in version >= 1.1. */
3892 unsigned int get_size () const
3895 (version.to_int () >= 0x00010001u ? featureVars.static_size : 0);
/* Validate the header: version must be 1.x, each offset'd list must
 * sanitize, and featureVars is checked only for version >= 1.1. */
3898 template <typename TLookup>
3899 bool sanitize (hb_sanitize_context_t *c) const
3901 TRACE_SANITIZE (this);
3902 typedef List16OfOffset16To<TLookup> TLookupList;
3903 if (unlikely (!(version.sanitize (c) &&
3904 likely (version.major == 1) &&
3905 scriptList.sanitize (c, this) &&
3906 featureList.sanitize (c, this) &&
/* Cast gives the lookup list its concrete (GSUB/GPOS) element type. */
3907 reinterpret_cast<const Offset16To<TLookupList> &> (lookupList).sanitize (c, this))))
3908 return_trace (false);
3911 if (unlikely (!(version.to_int () < 0x00010001u || featureVars.sanitize (c, this))))
3912 return_trace (false);
3915 return_trace (true);
/* accelerator_t: holds a sanitized reference to the table blob plus a
 * heap array of per-lookup accelerators, one per lookup. */
3918 template <typename T>
3919 struct accelerator_t
3921 accelerator_t (hb_face_t *face)
3923 this->table = hb_sanitize_context_t ().reference_table<T> (face);
/* Known-bad fonts are replaced wholesale with the empty table. */
3924 if (unlikely (this->table->is_blocklisted (this->table.get_blob (), face)))
3926 hb_blob_destroy (this->table.get_blob ());
3927 this->table = hb_blob_get_empty ();
3930 this->lookup_count = table->get_lookup_count ();
3932 this->accels = (hb_ot_layout_lookup_accelerator_t *) hb_calloc (this->lookup_count, sizeof (hb_ot_layout_lookup_accelerator_t));
/* Allocation failure degrades to an empty table, never an error. */
3933 if (unlikely (!this->accels))
3935 this->lookup_count = 0;
3936 this->table.destroy ();
3937 this->table = hb_blob_get_empty ();
3940 for (unsigned int i = 0; i < this->lookup_count; i++)
3941 this->accels[i].init (table->get_lookup (i));
/* NOTE(review): the destructor's signature line is missing from this
 * dump; the visible cleanup finis each accelerator, frees the array,
 * and releases the table blob. */
3945 for (unsigned int i = 0; i < this->lookup_count; i++)
3946 this->accels[i].fini ();
3947 hb_free (this->accels);
3948 this->table.destroy ();
3951 hb_blob_ptr_t<T> table;
3952 unsigned int lookup_count;
3953 hb_ot_layout_lookup_accelerator_t *accels;
/* On-disk GSUB/GPOS header layout; all offsets are from table start. */
3957 FixedVersion<>version; /* Version of the GSUB/GPOS table--initially set
3959 Offset16To<ScriptList>
3960 scriptList; /* ScriptList table */
3961 Offset16To<FeatureList>
3962 featureList; /* FeatureList table */
3963 Offset16To<LookupList>
3964 lookupList; /* LookupList table */
3965 Offset32To<FeatureVariations>
3966 featureVars; /* Offset to Feature Variations
3967 table--from beginning of table
3968 * (may be NULL). Introduced
3969 * in version 0x00010001. */
3971 DEFINE_SIZE_MIN (10);
3975 } /* namespace OT */
3978 #endif /* HB_OT_LAYOUT_GSUBGPOS_HH */