2 * Copyright © 2007,2008,2009,2010 Red Hat, Inc.
3 * Copyright © 2010,2012 Google, Inc.
5 * This is part of HarfBuzz, a text shaping library.
7 * Permission is hereby granted, without written agreement and without
8 * license or royalty fees, to use, copy, modify, and distribute this
9 * software and its documentation for any purpose, provided that the
10 * above copyright notice and the following two paragraphs appear in
11 * all copies of this software.
13 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
14 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
15 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
16 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
19 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
20 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
21 * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
22 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
23 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
25 * Red Hat Author(s): Behdad Esfahbod
26 * Google Author(s): Behdad Esfahbod
29 #ifndef HB_OT_LAYOUT_GSUBGPOS_HH
30 #define HB_OT_LAYOUT_GSUBGPOS_HH
33 #include "hb-buffer.hh"
36 #include "hb-ot-map.hh"
37 #include "hb-ot-layout-common.hh"
38 #include "hb-ot-layout-gdef-table.hh"
/* Dispatch context that answers: does any subtable of a lookup intersect the
 * given glyph set?  Iteration over subtables stops at the first `true`.
 * NOTE(review): this extraction elides interleaving lines (braces, template
 * headers, initializer list); comments describe only the visible code. */
44 struct hb_intersects_context_t :
45 hb_dispatch_context_t<hb_intersects_context_t, bool>
/* Forward the question to the subtable's own intersects() against our set. */
48 return_t dispatch (const T &obj) { return obj.intersects (this->glyphs); }
49 static return_t default_return_value () { return false; }
/* A true result short-circuits the remaining subtables. */
50 bool stop_sublookup_iteration (return_t r) const { return r; }
/* Glyph set under test; borrowed, not owned. */
52 const hb_set_t *glyphs;
54 hb_intersects_context_t (const hb_set_t *glyphs_) :
/* Dispatch context that answers: may any subtable perform a non-1:1 mapping
 * (per the subtable's own may_have_non_1to1())?  Stops at the first `true`. */
58 struct hb_have_non_1to1_context_t :
59 hb_dispatch_context_t<hb_have_non_1to1_context_t, bool>
62 return_t dispatch (const T &obj) { return obj.may_have_non_1to1 (); }
63 static return_t default_return_value () { return false; }
64 bool stop_sublookup_iteration (return_t r) const { return r; }
/* Closure context: expands a glyph set with everything reachable through the
 * lookups it is dispatched over, with memoization so a lookup is not re-walked
 * when its covering glyph set has not grown.
 * NOTE(review): many interleaving lines are elided in this extraction; the
 * comments below only describe what the visible lines show. */
67 struct hb_closure_context_t :
68 hb_dispatch_context_t<hb_closure_context_t>
70 typedef return_t (*recurse_func_t) (hb_closure_context_t *c, unsigned lookup_index, hb_set_t *covered_seq_indicies, unsigned seq_index, unsigned end_index);
72 return_t dispatch (const T &obj) { obj.closure (this); return hb_empty_t (); }
73 static return_t default_return_value () { return hb_empty_t (); }
/* Recurse into a sub-lookup; guarded by nesting depth and a registered
 * recurse callback (nullptr until set_recurse_func() is called). */
74 void recurse (unsigned lookup_index, hb_set_t *covered_seq_indicies, unsigned seq_index, unsigned end_index)
76 if (unlikely (nesting_level_left == 0 || !recurse_func))
80 recurse_func (this, lookup_index, covered_seq_indicies, seq_index, end_index);
84 void reset_lookup_visit_count ()
/* Global visit budget; caps pathological fonts with huge lookup graphs. */
87 bool lookup_limit_exceeded ()
88 { return lookup_count > HB_MAX_LOOKUP_VISIT_COUNT; }
/* Note: increments the visit counter as a side effect. */
90 bool should_visit_lookup (unsigned int lookup_index)
92 if (lookup_count++ > HB_MAX_LOOKUP_VISIT_COUNT)
95 if (is_lookup_done (lookup_index))
/* Memoization: a lookup is "done" when it was already visited with the same
 * glyph population and a superset of the currently active glyphs. */
101 bool is_lookup_done (unsigned int lookup_index)
103 if (done_lookups_glyph_count->in_error () ||
104 done_lookups_glyph_set->in_error ())
107 /* Have we visited this lookup with the current set of glyphs? */
108 if (done_lookups_glyph_count->get (lookup_index) != glyphs->get_population ())
110 done_lookups_glyph_count->set (lookup_index, glyphs->get_population ());
112 if (!done_lookups_glyph_set->get (lookup_index))
114 hb_set_t* empty_set = hb_set_create ();
/* Map insertion failed: destroy the set we just created to avoid a leak. */
115 if (unlikely (!done_lookups_glyph_set->set (lookup_index, empty_set)))
117 hb_set_destroy (empty_set);
/* Population changed: invalidate the cached covered set for this lookup. */
122 hb_set_clear (done_lookups_glyph_set->get (lookup_index));
125 hb_set_t *covered_glyph_set = done_lookups_glyph_set->get (lookup_index);
126 if (unlikely (covered_glyph_set->in_error ()))
/* Already covered everything currently active -> done. */
128 if (parent_active_glyphs ().is_subset (*covered_glyph_set))
131 covered_glyph_set->union_ (parent_active_glyphs ());
/* Second-from-top of the active-glyph stack. */
135 const hb_set_t& previous_parent_active_glyphs () {
136 if (active_glyphs_stack.length <= 1)
139 return active_glyphs_stack[active_glyphs_stack.length - 2];
/* Top of the active-glyph stack. */
142 const hb_set_t& parent_active_glyphs ()
144 if (!active_glyphs_stack)
147 return active_glyphs_stack.tail ();
150 hb_set_t& push_cur_active_glyphs ()
152 return *active_glyphs_stack.push ();
155 bool pop_cur_done_glyphs ()
157 if (active_glyphs_stack.length < 1)
160 active_glyphs_stack.pop ();
167 hb_vector_t<hb_set_t> active_glyphs_stack;
168 recurse_func_t recurse_func;
169 unsigned int nesting_level_left;
171 hb_closure_context_t (hb_face_t *face_,
173 hb_map_t *done_lookups_glyph_count_,
174 hb_hashmap_t<unsigned, hb_set_t *> *done_lookups_glyph_set_,
175 unsigned int nesting_level_left_ = HB_MAX_NESTING_LEVEL) :
178 recurse_func (nullptr),
179 nesting_level_left (nesting_level_left_),
180 done_lookups_glyph_count (done_lookups_glyph_count_),
181 done_lookups_glyph_set (done_lookups_glyph_set_),
/* Destructor flushes pending output into the main glyph set. */
185 ~hb_closure_context_t () { flush (); }
187 void set_recurse_func (recurse_func_t func) { recurse_func = func; }
191 output->del_range (face->get_num_glyphs (), HB_SET_VALUE_INVALID); /* Remove invalid glyphs. */
192 glyphs->union_ (*output);
194 active_glyphs_stack.pop ();
195 active_glyphs_stack.reset ();
/* Per-lookup memoization state; borrowed from the caller. */
199 hb_map_t *done_lookups_glyph_count;
200 hb_hashmap_t<unsigned, hb_set_t *> *done_lookups_glyph_set;
201 unsigned int lookup_count;
/* Lookup-closure context: computes the set of lookups reachable (directly or
 * via recursion) from a starting set, recording visited and inactive lookups.
 * NOTE(review): interleaving lines are elided in this extraction. */
206 struct hb_closure_lookups_context_t :
207 hb_dispatch_context_t<hb_closure_lookups_context_t>
209 typedef return_t (*recurse_func_t) (hb_closure_lookups_context_t *c, unsigned lookup_index);
210 template <typename T>
211 return_t dispatch (const T &obj) { obj.closure_lookups (this); return hb_empty_t (); }
212 static return_t default_return_value () { return hb_empty_t (); }
213 void recurse (unsigned lookup_index)
215 if (unlikely (nesting_level_left == 0 || !recurse_func))
218 /* Return if new lookup was recursed to before. */
219 if (lookup_limit_exceeded ()
220 || visited_lookups->in_error ()
221 || visited_lookups->has (lookup_index))
222 // Don't increment lookup count here, that will be done in the call to closure_lookups()
223 // made by recurse_func.
226 nesting_level_left--;
227 recurse_func (this, lookup_index);
228 nesting_level_left++;
231 void set_lookup_visited (unsigned lookup_index)
232 { visited_lookups->add (lookup_index); }
/* Inactive lookups can be dropped by the subsetter later. */
234 void set_lookup_inactive (unsigned lookup_index)
235 { inactive_lookups->add (lookup_index); }
237 bool lookup_limit_exceeded ()
239 bool ret = lookup_count > HB_MAX_LOOKUP_VISIT_COUNT;
241 DEBUG_MSG (SUBSET, nullptr, "lookup visit count limit exceeded in lookup closure!");
/* Note: increments the visit counter as a side effect; over-budget lookups
 * are reported as visited so they get skipped. */
244 bool is_lookup_visited (unsigned lookup_index)
246 if (unlikely (lookup_count++ > HB_MAX_LOOKUP_VISIT_COUNT))
248 DEBUG_MSG (SUBSET, nullptr, "total visited lookup count %u exceeds max limit, lookup %u is dropped.",
249 lookup_count, lookup_index);
253 if (unlikely (visited_lookups->in_error ()))
256 return visited_lookups->has (lookup_index);
260 const hb_set_t *glyphs;
261 recurse_func_t recurse_func;
262 unsigned int nesting_level_left;
264 hb_closure_lookups_context_t (hb_face_t *face_,
265 const hb_set_t *glyphs_,
266 hb_set_t *visited_lookups_,
267 hb_set_t *inactive_lookups_,
268 unsigned nesting_level_left_ = HB_MAX_NESTING_LEVEL) :
271 recurse_func (nullptr),
272 nesting_level_left (nesting_level_left_),
273 visited_lookups (visited_lookups_),
274 inactive_lookups (inactive_lookups_),
277 void set_recurse_func (recurse_func_t func) { recurse_func = func; }
/* Output sets; borrowed from the caller. */
280 hb_set_t *visited_lookups;
281 hb_set_t *inactive_lookups;
282 unsigned int lookup_count;
/* "Would-apply" context: tests whether a lookup would match a given fixed
 * glyph sequence (used e.g. to probe substitutions without shaping a buffer).
 * Stops at the first subtable that reports it would apply. */
285 struct hb_would_apply_context_t :
286 hb_dispatch_context_t<hb_would_apply_context_t, bool>
288 template <typename T>
289 return_t dispatch (const T &obj) { return obj.would_apply (this); }
290 static return_t default_return_value () { return false; }
291 bool stop_sublookup_iteration (return_t r) const { return r; }
/* Input glyph sequence; borrowed array of codepoints. */
294 const hb_codepoint_t *glyphs;
298 hb_would_apply_context_t (hb_face_t *face_,
299 const hb_codepoint_t *glyphs_,
/* zero_context: when set, context around the sequence must be empty. */
301 bool zero_context_) :
305 zero_context (zero_context_) {}
/* Glyph-collection context: gathers the before/input/after/output glyph sets
 * a lookup can touch.  Recursion only propagates the `output` set (see the
 * long comment in recurse()).  NOTE(review): interleaving lines are elided. */
308 struct hb_collect_glyphs_context_t :
309 hb_dispatch_context_t<hb_collect_glyphs_context_t>
311 typedef return_t (*recurse_func_t) (hb_collect_glyphs_context_t *c, unsigned int lookup_index);
312 template <typename T>
313 return_t dispatch (const T &obj) { obj.collect_glyphs (this); return hb_empty_t (); }
314 static return_t default_return_value () { return hb_empty_t (); }
315 void recurse (unsigned int lookup_index)
317 if (unlikely (nesting_level_left == 0 || !recurse_func))
320 /* Note that GPOS sets recurse_func to nullptr already, so it doesn't get
321 * past the previous check. For GSUB, we only want to collect the output
322 * glyphs in the recursion. If output is not requested, we can go home now.
324 * Note further, that the above is not exactly correct. A recursed lookup
325 * is allowed to match input that is not matched in the context, but that's
326 * not how most fonts are built. It's possible to relax that and recurse
327 * with all sets here if it proves to be an issue.
330 if (output == hb_set_get_empty ())
333 /* Return if new lookup was recursed to before. */
334 if (recursed_lookups->has (lookup_index))
/* Temporarily redirect before/input/after to the empty sink so the recursed
 * lookup only contributes to `output`; restored after the call (the restore
 * lines are elided in this extraction). */
337 hb_set_t *old_before = before;
338 hb_set_t *old_input = input;
339 hb_set_t *old_after = after;
340 before = input = after = hb_set_get_empty ();
342 nesting_level_left--;
343 recurse_func (this, lookup_index);
344 nesting_level_left++;
350 recursed_lookups->add (lookup_index);
358 recurse_func_t recurse_func;
/* Owned; created in the constructor, destroyed in the destructor. */
359 hb_set_t *recursed_lookups;
360 unsigned int nesting_level_left;
362 hb_collect_glyphs_context_t (hb_face_t *face_,
363 hb_set_t *glyphs_before, /* OUT. May be NULL */
364 hb_set_t *glyphs_input, /* OUT. May be NULL */
365 hb_set_t *glyphs_after, /* OUT. May be NULL */
366 hb_set_t *glyphs_output, /* OUT. May be NULL */
367 unsigned int nesting_level_left_ = HB_MAX_NESTING_LEVEL) :
/* NULL out-params are replaced by the shared empty set so collectors can
 * write unconditionally. */
369 before (glyphs_before ? glyphs_before : hb_set_get_empty ()),
370 input (glyphs_input ? glyphs_input : hb_set_get_empty ()),
371 after (glyphs_after ? glyphs_after : hb_set_get_empty ()),
372 output (glyphs_output ? glyphs_output : hb_set_get_empty ()),
373 recurse_func (nullptr),
374 recursed_lookups (hb_set_create ()),
375 nesting_level_left (nesting_level_left_) {}
376 ~hb_collect_glyphs_context_t () { hb_set_destroy (recursed_lookups); }
378 void set_recurse_func (recurse_func_t func) { recurse_func = func; }
/* Coverage-collection context: dispatch returns each subtable's Coverage
 * table, whose glyphs are folded into `set` as iteration proceeds. */
383 template <typename set_t>
384 struct hb_collect_coverage_context_t :
385 hb_dispatch_context_t<hb_collect_coverage_context_t<set_t>, const Coverage &>
387 typedef const Coverage &return_t; // Stoopid that we have to dupe this here.
388 template <typename T>
389 return_t dispatch (const T &obj) { return obj.get_coverage (); }
390 static return_t default_return_value () { return Null (Coverage); }
/* Side effect: accumulate the returned coverage into `set`. */
391 bool stop_sublookup_iteration (return_t r) const
393 r.collect_coverage (set);
397 hb_collect_coverage_context_t (set_t *set_) :
/* The main per-lookup application context used while shaping: holds the
 * buffer, lookup state (mask, props, syllable), the GDEF accessors, and the
 * skipping iterators used to walk the buffer while honoring lookup flags.
 * NOTE(review): this extraction elides many interleaving lines (braces,
 * member declarations, #endif's); comments describe only visible code. */
404 struct hb_ot_apply_context_t :
405 hb_dispatch_context_t<hb_ot_apply_context_t, bool, HB_DEBUG_APPLY>
414 #define arg1(arg) (arg) /* Remove the macro to see why it's needed! */
417 match_func (nullptr),
418 match_data (nullptr) {}
420 typedef bool (*match_func_t) (hb_codepoint_t glyph_id, const HBUINT16 &value, const void *data);
422 void set_ignore_zwnj (bool ignore_zwnj_) { ignore_zwnj = ignore_zwnj_; }
423 void set_ignore_zwj (bool ignore_zwj_) { ignore_zwj = ignore_zwj_; }
424 void set_lookup_props (unsigned int lookup_props_) { lookup_props = lookup_props_; }
425 void set_mask (hb_mask_t mask_) { mask = mask_; }
426 void set_syllable (uint8_t syllable_) { syllable = syllable_; }
427 void set_match_func (match_func_t match_func_,
428 const void *match_data_)
429 { match_func = match_func_; match_data = match_data_; }
/* Tri-state match: mask/syllable mismatch rejects outright; otherwise defer
 * to match_func when one is installed (MATCH_MAYBE when there is none —
 * the intermediate branch is elided in this extraction). */
437 may_match_t may_match (const hb_glyph_info_t &info,
438 const HBUINT16 *glyph_data) const
440 if (!(info.mask & mask) ||
441 (syllable && syllable != info.syllable ()))
445 return match_func (info.codepoint, *glyph_data, match_data) ? MATCH_YES : MATCH_NO;
/* Tri-state skip decision based on lookup_props and default-ignorable
 * handling (ZWNJ/ZWJ are only skippable when the flags say so). */
456 may_skip_t may_skip (const hb_ot_apply_context_t *c,
457 const hb_glyph_info_t &info) const
459 if (!c->check_glyph_property (&info, lookup_props))
462 if (unlikely (_hb_glyph_info_is_default_ignorable_and_not_hidden (&info) &&
463 (ignore_zwnj || !_hb_glyph_info_is_zwnj (&info)) &&
464 (ignore_zwj || !_hb_glyph_info_is_zwj (&info))))
471 unsigned int lookup_props;
476 match_func_t match_func;
477 const void *match_data;
/* Iterator that walks the buffer forward/backward while skipping glyphs the
 * current lookup is flagged to ignore. */
480 struct skipping_iterator_t
482 void init (hb_ot_apply_context_t *c_, bool context_match = false)
485 match_glyph_data = nullptr;
486 matcher.set_match_func (nullptr, nullptr);
487 matcher.set_lookup_props (c->lookup_props);
488 /* Ignore ZWNJ if we are matching GPOS, or matching GSUB context and asked to. */
489 matcher.set_ignore_zwnj (c->table_index == 1 || (context_match && c->auto_zwnj));
490 /* Ignore ZWJ if we are matching context, or asked to. */
491 matcher.set_ignore_zwj (context_match || c->auto_zwj);
492 matcher.set_mask (context_match ? -1 : c->lookup_mask);
494 void set_lookup_props (unsigned int lookup_props)
496 matcher.set_lookup_props (lookup_props);
498 void set_match_func (matcher_t::match_func_t match_func_,
499 const void *match_data_,
500 const HBUINT16 glyph_data[])
502 matcher.set_match_func (match_func_, match_data_);
503 match_glyph_data = glyph_data;
506 void reset (unsigned int start_index_,
507 unsigned int num_items_)
510 num_items = num_items_;
511 end = c->buffer->len;
/* Lock the syllable only when starting at the buffer cursor. */
512 matcher.set_syllable (start_index_ == c->buffer->idx ? c->buffer->cur().syllable () : 0);
518 if (match_glyph_data) match_glyph_data--;
521 matcher_t::may_skip_t
522 may_skip (const hb_glyph_info_t &info) const
523 { return matcher.may_skip (c, info); }
/* Advance to the next matching glyph; on failure reports the first position
 * it is unsafe to break before via *unsafe_to. */
525 bool next (unsigned *unsafe_to = nullptr)
527 assert (num_items > 0);
528 while (idx + num_items < end)
531 const hb_glyph_info_t &info = c->buffer->info[idx];
533 matcher_t::may_skip_t skip = matcher.may_skip (c, info);
534 if (unlikely (skip == matcher_t::SKIP_YES))
537 matcher_t::may_match_t match = matcher.may_match (info, match_glyph_data);
538 if (match == matcher_t::MATCH_YES ||
539 (match == matcher_t::MATCH_MAYBE &&
540 skip == matcher_t::SKIP_NO))
543 if (match_glyph_data) match_glyph_data++;
547 if (skip == matcher_t::SKIP_NO)
550 *unsafe_to = idx + 1;
/* Mirror of next() walking the out-buffer backwards. */
558 bool prev (unsigned *unsafe_from = nullptr)
560 assert (num_items > 0);
561 while (idx > num_items - 1)
564 const hb_glyph_info_t &info = c->buffer->out_info[idx];
566 matcher_t::may_skip_t skip = matcher.may_skip (c, info);
567 if (unlikely (skip == matcher_t::SKIP_YES))
570 matcher_t::may_match_t match = matcher.may_match (info, match_glyph_data);
571 if (match == matcher_t::MATCH_YES ||
572 (match == matcher_t::MATCH_MAYBE &&
573 skip == matcher_t::SKIP_NO))
576 if (match_glyph_data) match_glyph_data++;
580 if (skip == matcher_t::SKIP_NO)
583 *unsafe_from = hb_max (1u, idx) - 1u;
594 hb_ot_apply_context_t *c;
596 const HBUINT16 *match_glyph_data;
598 unsigned int num_items;
603 const char *get_name () { return "APPLY"; }
604 typedef return_t (*recurse_func_t) (hb_ot_apply_context_t *c, unsigned int lookup_index);
605 template <typename T>
606 return_t dispatch (const T &obj) { return obj.apply (this); }
607 static return_t default_return_value () { return false; }
608 bool stop_sublookup_iteration (return_t r) const { return r; }
/* Recurse into a sub-lookup, bounded by nesting depth and the buffer's
 * operation budget (max_ops) to stop runaway fonts. */
609 return_t recurse (unsigned int sub_lookup_index)
611 if (unlikely (nesting_level_left == 0 || !recurse_func || buffer->max_ops-- <= 0))
612 return default_return_value ();
614 nesting_level_left--;
615 bool ret = recurse_func (this, sub_lookup_index);
616 nesting_level_left++;
620 skipping_iterator_t iter_input, iter_context;
625 recurse_func_t recurse_func;
627 const VariationStore &var_store;
629 hb_direction_t direction;
630 hb_mask_t lookup_mask;
631 unsigned int table_index; /* GSUB/GPOS */
632 unsigned int lookup_index;
633 unsigned int lookup_props;
634 unsigned int nesting_level_left;
636 bool has_glyph_classes;
/* State for the `rand` feature's PRNG; seeded to 1 in the constructor. */
641 uint32_t random_state;
644 signed last_base = -1; // GPOS uses
645 unsigned last_base_until = 0; // GPOS uses
647 hb_ot_apply_context_t (unsigned int table_index_,
649 hb_buffer_t *buffer_) :
650 iter_input (), iter_context (),
651 font (font_), face (font->face), buffer (buffer_),
652 recurse_func (nullptr),
654 #ifndef HB_NO_OT_LAYOUT
655 *face->table.GDEF->table
660 var_store (gdef.get_var_store ()),
661 direction (buffer_->props.direction),
663 table_index (table_index_),
664 lookup_index ((unsigned int) -1),
666 nesting_level_left (HB_MAX_NESTING_LEVEL),
667 has_glyph_classes (gdef.has_glyph_classes ()),
671 random_state (1) { init_iters (); }
675 iter_input.init (this, false);
676 iter_context.init (this, true);
/* Setters re-init the iterators because matcher state depends on them. */
679 void set_lookup_mask (hb_mask_t mask) { lookup_mask = mask; last_base = -1; last_base_until = 0; init_iters (); }
680 void set_auto_zwj (bool auto_zwj_) { auto_zwj = auto_zwj_; init_iters (); }
681 void set_auto_zwnj (bool auto_zwnj_) { auto_zwnj = auto_zwnj_; init_iters (); }
682 void set_random (bool random_) { random = random_; }
683 void set_recurse_func (recurse_func_t func) { recurse_func = func; }
684 void set_lookup_index (unsigned int lookup_index_) { lookup_index = lookup_index_; }
685 void set_lookup_props (unsigned int lookup_props_) { lookup_props = lookup_props_; init_iters (); }
/* minstd_rand linear congruential generator. */
687 uint32_t random_number ()
689 /* http://www.cplusplus.com/reference/random/minstd_rand/ */
690 random_state = random_state * 48271 % 2147483647;
694 bool match_properties_mark (hb_codepoint_t glyph,
695 unsigned int glyph_props,
696 unsigned int match_props) const
698 /* If using mark filtering sets, the high short of
699 * match_props has the set index.
701 if (match_props & LookupFlag::UseMarkFilteringSet)
702 return gdef.mark_set_covers (match_props >> 16, glyph);
704 /* The second byte of match_props has the meaning
705 * "ignore marks of attachment type different than
706 * the attachment type specified."
708 if (match_props & LookupFlag::MarkAttachmentType)
709 return (match_props & LookupFlag::MarkAttachmentType) == (glyph_props & LookupFlag::MarkAttachmentType);
714 bool check_glyph_property (const hb_glyph_info_t *info,
715 unsigned int match_props) const
717 hb_codepoint_t glyph = info->codepoint;
718 unsigned int glyph_props = _hb_glyph_info_get_glyph_props (info);
720 /* Not covered, if, for example, glyph class is ligature and
721 * match_props includes LookupFlags::IgnoreLigatures
723 if (glyph_props & match_props & LookupFlag::IgnoreFlags)
726 if (unlikely (glyph_props & HB_OT_LAYOUT_GLYPH_PROPS_MARK))
727 return match_properties_mark (glyph, glyph_props, match_props);
/* Update the current glyph's GDEF-derived props after a substitution;
 * class_guess is used only when the face has no glyph-class table. */
732 void _set_glyph_class (hb_codepoint_t glyph_index,
733 unsigned int class_guess = 0,
734 bool ligature = false,
735 bool component = false) const
737 unsigned int props = _hb_glyph_info_get_glyph_props (&buffer->cur());
738 props |= HB_OT_LAYOUT_GLYPH_PROPS_SUBSTITUTED;
741 props |= HB_OT_LAYOUT_GLYPH_PROPS_LIGATED;
742 /* In the only place that the MULTIPLIED bit is used, Uniscribe
743 * seems to only care about the "last" transformation between
744 * Ligature and Multiple substitutions. Ie. if you ligate, expand,
745 * and ligate again, it forgives the multiplication and acts as
746 * if only ligation happened. As such, clear MULTIPLIED bit.
748 props &= ~HB_OT_LAYOUT_GLYPH_PROPS_MULTIPLIED;
751 props |= HB_OT_LAYOUT_GLYPH_PROPS_MULTIPLIED;
752 if (likely (has_glyph_classes))
754 props &= HB_OT_LAYOUT_GLYPH_PROPS_PRESERVE;
755 _hb_glyph_info_set_glyph_props (&buffer->cur(), props | gdef.get_glyph_props (glyph_index));
757 else if (class_guess)
759 props &= HB_OT_LAYOUT_GLYPH_PROPS_PRESERVE;
760 _hb_glyph_info_set_glyph_props (&buffer->cur(), props | class_guess);
763 _hb_glyph_info_set_glyph_props (&buffer->cur(), props);
/* Substitution helpers: update props, then mutate the buffer. */
766 void replace_glyph (hb_codepoint_t glyph_index) const
768 _set_glyph_class (glyph_index);
769 (void) buffer->replace_glyph (glyph_index);
771 void replace_glyph_inplace (hb_codepoint_t glyph_index) const
773 _set_glyph_class (glyph_index);
774 buffer->cur().codepoint = glyph_index;
776 void replace_glyph_with_ligature (hb_codepoint_t glyph_index,
777 unsigned int class_guess) const
779 _set_glyph_class (glyph_index, class_guess, true);
780 (void) buffer->replace_glyph (glyph_index);
782 void output_glyph_for_component (hb_codepoint_t glyph_index,
783 unsigned int class_guess) const
785 _set_glyph_class (glyph_index, class_guess, false, true);
786 (void) buffer->output_glyph (glyph_index);
/* Subtable-flattening context: collects every subtable of a lookup into an
 * array of (object pointer, typed apply thunk, coverage digest) entries so
 * apply can be driven without re-dispatching, and cheap digest rejection
 * can skip subtables that cannot match the current glyph. */
791 struct hb_get_subtables_context_t :
792 hb_dispatch_context_t<hb_get_subtables_context_t>
794 template <typename Type>
/* Type-erasure trampoline: recover the concrete subtable type and apply. */
795 static inline bool apply_to (const void *obj, OT::hb_ot_apply_context_t *c)
797 const Type *typed_obj = (const Type *) obj;
798 return typed_obj->apply (c);
801 typedef bool (*hb_apply_func_t) (const void *obj, OT::hb_ot_apply_context_t *c);
803 struct hb_applicable_t
805 template <typename T>
806 void init (const T &obj_, hb_apply_func_t apply_func_)
809 apply_func = apply_func_;
/* Seed the digest from the subtable's coverage for fast rejection. */
811 obj_.get_coverage ().collect_coverage (&digest);
814 bool apply (OT::hb_ot_apply_context_t *c) const
/* Digest check first: cheap filter before the virtual-ish apply call. */
816 return digest.may_have (c->buffer->cur().codepoint) && apply_func (obj, c);
821 hb_apply_func_t apply_func;
822 hb_set_digest_t digest;
825 typedef hb_vector_t<hb_applicable_t> array_t;
827 /* Dispatch interface. */
828 template <typename T>
829 return_t dispatch (const T &obj)
831 hb_applicable_t *entry = array.push();
832 entry->init (obj, apply_to<T>);
833 return hb_empty_t ();
835 static return_t default_return_value () { return hb_empty_t (); }
837 hb_get_subtables_context_t (array_t &array_) :
/* Function-pointer vocabularies used by the Context/ChainContext machinery:
 * each context subtable format (glyph / class / coverage) supplies its own
 * implementations, bundled into the small *Funcs structs below. */
846 typedef bool (*intersects_func_t) (const hb_set_t *glyphs, const HBUINT16 &value, const void *data);
847 typedef void (*intersected_glyphs_func_t) (const hb_set_t *glyphs, const void *data, unsigned value, hb_set_t *intersected_glyphs);
848 typedef void (*collect_glyphs_func_t) (hb_set_t *glyphs, const HBUINT16 &value, const void *data);
849 typedef bool (*match_func_t) (hb_codepoint_t glyph_id, const HBUINT16 &value, const void *data);
851 struct ContextClosureFuncs
853 intersects_func_t intersects;
854 intersected_glyphs_func_t intersected_glyphs;
856 struct ContextCollectGlyphsFuncs
858 collect_glyphs_func_t collect;
/* NOTE(review): ContextApplyFuncs' member (a match_func_t, per the pattern
 * above) is elided in this extraction — confirm against upstream. */
860 struct ContextApplyFuncs
866 static inline bool intersects_glyph (const hb_set_t *glyphs, const HBUINT16 &value, const void *data HB_UNUSED)
868 return glyphs->has (value);
870 static inline bool intersects_class (const hb_set_t *glyphs, const HBUINT16 &value, const void *data)
872 const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
873 return class_def.intersects_class (glyphs, value);
875 static inline bool intersects_coverage (const hb_set_t *glyphs, const HBUINT16 &value, const void *data)
877 const Offset16To<Coverage> &coverage = (const Offset16To<Coverage>&)value;
878 return (data+coverage).intersects (glyphs);
882 static inline void intersected_glyph (const hb_set_t *glyphs HB_UNUSED, const void *data, unsigned value, hb_set_t *intersected_glyphs)
884 unsigned g = reinterpret_cast<const HBUINT16 *>(data)[value];
885 intersected_glyphs->add (g);
887 static inline void intersected_class_glyphs (const hb_set_t *glyphs, const void *data, unsigned value, hb_set_t *intersected_glyphs)
889 const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
890 class_def.intersected_class_glyphs (glyphs, value, intersected_glyphs);
/* Coverage-format glyph collection: resolve the coverage offset and collect
 * the covered glyphs that intersect the input set.
 * NOTE(review): the line initializing `coverage` from the entry value is
 * elided in this extraction — confirm against upstream before relying. */
892 static inline void intersected_coverage_glyphs (const hb_set_t *glyphs, const void *data, unsigned value, hb_set_t *intersected_glyphs)
894 Offset16To<Coverage> coverage;
896 (data+coverage).intersected_coverage_glyphs (glyphs, intersected_glyphs);
/* True iff every entry of `values` intersects `glyphs` per the supplied
 * per-format intersects_func.  NOTE(review): the `count` parameter line and
 * the trailing `return true;` are elided in this extraction. */
900 static inline bool array_is_subset_of (const hb_set_t *glyphs,
902 const HBUINT16 values[],
903 intersects_func_t intersects_func,
904 const void *intersects_data)
/* Short-circuit: one non-intersecting entry sinks the whole rule. */
906 for (const HBUINT16 &_ : + hb_iter (values, count))
907 if (!intersects_func (glyphs, _, intersects_data)) return false;
/* GlyphID-format collector.  NOTE(review): the body is elided in this
 * extraction — presumably it adds `value` to `glyphs`; confirm upstream. */
912 static inline void collect_glyph (hb_set_t *glyphs, const HBUINT16 &value, const void *data HB_UNUSED)
916 static inline void collect_class (hb_set_t *glyphs, const HBUINT16 &value, const void *data)
918 const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
919 class_def.collect_class (glyphs, value);
921 static inline void collect_coverage (hb_set_t *glyphs, const HBUINT16 &value, const void *data)
923 const Offset16To<Coverage> &coverage = (const Offset16To<Coverage>&)value;
924 (data+coverage).collect_coverage (glyphs);
/* Run the per-format collector over every entry of a rule's value array.
 * NOTE(review): the `glyphs` and `count` parameter lines are elided in this
 * extraction (both are referenced in the body below) — confirm upstream. */
926 static inline void collect_array (hb_collect_glyphs_context_t *c HB_UNUSED,
929 const HBUINT16 values[],
930 collect_glyphs_func_t collect_func,
931 const void *collect_data)
934 + hb_iter (values, count)
935 | hb_apply ([&] (const HBUINT16 &_) { collect_func (glyphs, _, collect_data); })
940 static inline bool match_glyph (hb_codepoint_t glyph_id, const HBUINT16 &value, const void *data HB_UNUSED)
942 return glyph_id == value;
944 static inline bool match_class (hb_codepoint_t glyph_id, const HBUINT16 &value, const void *data)
946 const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
947 return class_def.get_class (glyph_id) == value;
949 static inline bool match_coverage (hb_codepoint_t glyph_id, const HBUINT16 &value, const void *data)
951 const Offset16To<Coverage> &coverage = (const Offset16To<Coverage>&)value;
952 return (data+coverage).get_coverage (glyph_id) != NOT_COVERED;
/* Dry-run input matching against the fixed glyph sequence in a
 * would-apply context (no buffer, no skipping).  NOTE(review): the length
 * check / early-return lines are elided in this extraction. */
955 static inline bool would_match_input (hb_would_apply_context_t *c,
956 unsigned int count, /* Including the first glyph (not matched) */
957 const HBUINT16 input[], /* Array of input values--start with second glyph */
958 match_func_t match_func,
959 const void *match_data)
/* Entry i of `input` corresponds to sequence glyph i+1. */
964 for (unsigned int i = 1; i < count; i++)
965 if (likely (!match_func (c->glyphs[i], input[i - 1], match_data)))
/* Match a rule's input sequence starting at the buffer cursor, using the
 * input skipping-iterator.  On success fills match_positions[0..count) and
 * *end_position; also accumulates the total ligature-component count for
 * later ligation.  NOTE(review): interleaving lines are elided in this
 * extraction; comments only describe the visible code. */
970 static inline bool match_input (hb_ot_apply_context_t *c,
971 unsigned int count, /* Including the first glyph (not matched) */
972 const HBUINT16 input[], /* Array of input values--start with second glyph */
973 match_func_t match_func,
974 const void *match_data,
975 unsigned int *end_position,
976 unsigned int match_positions[HB_MAX_CONTEXT_LENGTH],
977 unsigned int *p_total_component_count = nullptr)
979 TRACE_APPLY (nullptr);
981 if (unlikely (count > HB_MAX_CONTEXT_LENGTH)) return_trace (false);
983 hb_buffer_t *buffer = c->buffer;
985 hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
986 skippy_iter.reset (buffer->idx, count - 1);
987 skippy_iter.set_match_func (match_func, match_data, input);
990 * This is perhaps the trickiest part of OpenType... Remarks:
992 * - If all components of the ligature were marks, we call this a mark ligature.
994 * - If there is no GDEF, and the ligature is NOT a mark ligature, we categorize
995 * it as a ligature glyph.
997 * - Ligatures cannot be formed across glyphs attached to different components
998 * of previous ligatures. Eg. the sequence is LAM,SHADDA,LAM,FATHA,HEH, and
999 * LAM,LAM,HEH form a ligature, leaving SHADDA,FATHA next to eachother.
1000 * However, it would be wrong to ligate that SHADDA,FATHA sequence.
1001 * There are a couple of exceptions to this:
1003 * o If a ligature tries ligating with marks that belong to it itself, go ahead,
1004 * assuming that the font designer knows what they are doing (otherwise it can
1005 * break Indic stuff when a matra wants to ligate with a conjunct,
1007 * o If two marks want to ligate and they belong to different components of the
1008 * same ligature glyph, and said ligature glyph is to be ignored according to
1009 * mark-filtering rules, then allow.
1010 * https://github.com/harfbuzz/harfbuzz/issues/545
1013 unsigned int total_component_count = 0;
1014 total_component_count += _hb_glyph_info_get_lig_num_comps (&buffer->cur());
/* Ligature id/component of the first (already matched) glyph; used to
 * enforce the attachment rules described above. */
1016 unsigned int first_lig_id = _hb_glyph_info_get_lig_id (&buffer->cur());
1017 unsigned int first_lig_comp = _hb_glyph_info_get_lig_comp (&buffer->cur());
/* Lazily-computed verdict on whether the base ligature is skippable. */
1020 LIGBASE_NOT_CHECKED,
1021 LIGBASE_MAY_NOT_SKIP,
1023 } ligbase = LIGBASE_NOT_CHECKED;
1025 match_positions[0] = buffer->idx;
1026 for (unsigned int i = 1; i < count; i++)
1029 if (!skippy_iter.next (&unsafe_to))
/* Report how far the failed match extended, for unsafe-to-break. */
1031 *end_position = unsafe_to;
1032 return_trace (false);
1035 match_positions[i] = skippy_iter.idx;
1037 unsigned int this_lig_id = _hb_glyph_info_get_lig_id (&buffer->info[skippy_iter.idx]);
1038 unsigned int this_lig_comp = _hb_glyph_info_get_lig_comp (&buffer->info[skippy_iter.idx]);
1040 if (first_lig_id && first_lig_comp)
1042 /* If first component was attached to a previous ligature component,
1043 * all subsequent components should be attached to the same ligature
1044 * component, otherwise we shouldn't ligate them... */
1045 if (first_lig_id != this_lig_id || first_lig_comp != this_lig_comp)
1047 /* ...unless, we are attached to a base ligature and that base
1048 * ligature is ignorable. */
1049 if (ligbase == LIGBASE_NOT_CHECKED)
/* Scan the out-buffer backwards for the base of ligature first_lig_id. */
1052 const auto *out = buffer->out_info;
1053 unsigned int j = buffer->out_len;
1054 while (j && _hb_glyph_info_get_lig_id (&out[j - 1]) == first_lig_id)
1056 if (_hb_glyph_info_get_lig_comp (&out[j - 1]) == 0)
1065 if (found && skippy_iter.may_skip (out[j]) == hb_ot_apply_context_t::matcher_t::SKIP_YES)
1066 ligbase = LIGBASE_MAY_SKIP;
1068 ligbase = LIGBASE_MAY_NOT_SKIP;
1071 if (ligbase == LIGBASE_MAY_NOT_SKIP)
1072 return_trace (false);
1077 /* If first component was NOT attached to a previous ligature component,
1078 * all subsequent components should also NOT be attached to any ligature
1079 * component, unless they are attached to the first component itself! */
1080 if (this_lig_id && this_lig_comp && (this_lig_id != first_lig_id))
1081 return_trace (false);
1084 total_component_count += _hb_glyph_info_get_lig_num_comps (&buffer->info[skippy_iter.idx]);
1087 *end_position = skippy_iter.idx + 1;
1089 if (p_total_component_count)
1090 *p_total_component_count = total_component_count;
1092 return_trace (true);
/* Replace a matched input sequence with a single ligature glyph, fixing up
 * cluster values and the ligature id/component props of the glyphs (marks)
 * caught between and after the components.
 * NOTE(review): this extraction elides interleaving lines, and the function
 * body continues past the end of this chunk. */
1094 static inline bool ligate_input (hb_ot_apply_context_t *c,
1095 unsigned int count, /* Including the first glyph */
1096 const unsigned int match_positions[HB_MAX_CONTEXT_LENGTH], /* Including the first glyph */
1097 unsigned int match_end,
1098 hb_codepoint_t lig_glyph,
1099 unsigned int total_component_count)
1101 TRACE_APPLY (nullptr);
1103 hb_buffer_t *buffer = c->buffer;
1105 buffer->merge_clusters (buffer->idx, match_end);
1107 /* - If a base and one or more marks ligate, consider that as a base, NOT
1108 * ligature, such that all following marks can still attach to it.
1109 * https://github.com/harfbuzz/harfbuzz/issues/1109
1111 * - If all components of the ligature were marks, we call this a mark ligature.
1112 * If it *is* a mark ligature, we don't allocate a new ligature id, and leave
1113 * the ligature to keep its old ligature id. This will allow it to attach to
1114 * a base ligature in GPOS. Eg. if the sequence is: LAM,LAM,SHADDA,FATHA,HEH,
1115 * and LAM,LAM,HEH for a ligature, they will leave SHADDA and FATHA with a
1116 * ligature id and component value of 2. Then if SHADDA,FATHA form a ligature
1117 * later, we don't want them to lose their ligature id/component, otherwise
1118 * GPOS will fail to correctly position the mark ligature on top of the
1119 * LAM,LAM,HEH ligature. See:
1120 * https://bugzilla.gnome.org/show_bug.cgi?id=676343
1122 * - If a ligature is formed of components that some of which are also ligatures
1123 * themselves, and those ligature components had marks attached to *their*
1124 * components, we have to attach the marks to the new ligature component
1125 * positions! Now *that*'s tricky! And these marks may be following the
1126 * last component of the whole sequence, so we should loop forward looking
1127 * for them and update them.
1129 * Eg. the sequence is LAM,LAM,SHADDA,FATHA,HEH, and the font first forms a
1130 * 'calt' ligature of LAM,HEH, leaving the SHADDA and FATHA with a ligature
1131 * id and component == 1. Now, during 'liga', the LAM and the LAM-HEH ligature
1132 * form a LAM-LAM-HEH ligature. We need to reassign the SHADDA and FATHA to
1133 * the new ligature with a component value of 2.
1135 * This in fact happened to a font... See:
1136 * https://bugzilla.gnome.org/show_bug.cgi?id=437633
1139 bool is_base_ligature = _hb_glyph_info_is_base_glyph (&buffer->info[match_positions[0]]);
1140 bool is_mark_ligature = _hb_glyph_info_is_mark (&buffer->info[match_positions[0]]);
/* Any non-mark component demotes both classifications. */
1141 for (unsigned int i = 1; i < count; i++)
1142 if (!_hb_glyph_info_is_mark (&buffer->info[match_positions[i]]))
1144 is_base_ligature = false;
1145 is_mark_ligature = false;
1148 bool is_ligature = !is_base_ligature && !is_mark_ligature;
/* Only "real" ligatures get the LIGATURE prop and a fresh lig id. */
1150 unsigned int klass = is_ligature ? HB_OT_LAYOUT_GLYPH_PROPS_LIGATURE : 0;
1151 unsigned int lig_id = is_ligature ? _hb_allocate_lig_id (buffer) : 0;
1152 unsigned int last_lig_id = _hb_glyph_info_get_lig_id (&buffer->cur());
1153 unsigned int last_num_components = _hb_glyph_info_get_lig_num_comps (&buffer->cur());
1154 unsigned int components_so_far = last_num_components;
1158 _hb_glyph_info_set_lig_props_for_ligature (&buffer->cur(), lig_id, total_component_count);
/* A ligature carrying a mark category would be skipped by mark filters;
 * recategorize it as a letter. */
1159 if (_hb_glyph_info_get_general_category (&buffer->cur()) == HB_UNICODE_GENERAL_CATEGORY_NON_SPACING_MARK)
1161 _hb_glyph_info_set_general_category (&buffer->cur(), HB_UNICODE_GENERAL_CATEGORY_OTHER_LETTER);
1164 c->replace_glyph_with_ligature (lig_glyph, klass);
/* Walk the skipped glyphs between components, remapping their ligature
 * component indices onto the new ligature. */
1166 for (unsigned int i = 1; i < count; i++)
1168 while (buffer->idx < match_positions[i] && buffer->successful)
1172 unsigned int this_comp = _hb_glyph_info_get_lig_comp (&buffer->cur());
1174 this_comp = last_num_components;
1175 unsigned int new_lig_comp = components_so_far - last_num_components +
1176 hb_min (this_comp, last_num_components);
1177 _hb_glyph_info_set_lig_props_for_mark (&buffer->cur(), lig_id, new_lig_comp);
1179 (void) buffer->next_glyph ();
1182 last_lig_id = _hb_glyph_info_get_lig_id (&buffer->cur());
1183 last_num_components = _hb_glyph_info_get_lig_num_comps (&buffer->cur());
1184 components_so_far += last_num_components;
1186 /* Skip the base glyph */
1190 if (!is_mark_ligature && last_lig_id)
1192 /* Re-adjust components for any marks following. */
1193 for (unsigned i = buffer->idx; i < buffer->len; ++i)
1195 if (last_lig_id != _hb_glyph_info_get_lig_id (&buffer->info[i])) break;
1197 unsigned this_comp = _hb_glyph_info_get_lig_comp (&buffer->info[i]);
1198 if (!this_comp) break;
1200 unsigned new_lig_comp = components_so_far - last_num_components +
1201 hb_min (this_comp, last_num_components);
1202 _hb_glyph_info_set_lig_props_for_mark (&buffer->info[i], lig_id, new_lig_comp);
1205 return_trace (true);
/* Match `count` backtrack glyphs (walking backwards from the current
 * position) against `backtrack[]` using `match_func`/`match_data`.
 * On success *match_start is the buffer index of the first (farthest)
 * matched backtrack glyph; on failure it is the unsafe-from position
 * the iterator stopped at.  Returns whether the backtrack matched. */
1208 static inline bool match_backtrack (hb_ot_apply_context_t *c,
1210 const HBUINT16 backtrack[],
1211 match_func_t match_func,
1212 const void *match_data,
1213 unsigned int *match_start)
1215 TRACE_APPLY (nullptr);
/* iter_context skips ignorables per lookup flags while walking backwards. */
1217 hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_context;
1218 skippy_iter.reset (c->buffer->backtrack_len (), count);
1219 skippy_iter.set_match_func (match_func, match_data, backtrack);
1221 for (unsigned int i = 0; i < count; i++)
1223 unsigned unsafe_from;
1224 if (!skippy_iter.prev (&unsafe_from))
1226 *match_start = unsafe_from;
1227 return_trace (false);
1231 *match_start = skippy_iter.idx;
1232 return_trace (true);
/* Match `count` lookahead glyphs (walking forward from start_index)
 * against `lookahead[]` using `match_func`/`match_data`.
 * On success *end_index is one past the last matched lookahead glyph;
 * on failure it is the unsafe-to position the iterator stopped at.
 * Mirrors match_backtrack() above, in the forward direction. */
1235 static inline bool match_lookahead (hb_ot_apply_context_t *c,
1237 const HBUINT16 lookahead[],
1238 match_func_t match_func,
1239 const void *match_data,
1240 unsigned int start_index,
1241 unsigned int *end_index)
1243 TRACE_APPLY (nullptr);
/* reset() takes the index *before* the first candidate, hence - 1. */
1245 hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_context;
1246 skippy_iter.reset (start_index - 1, count);
1247 skippy_iter.set_match_func (match_func, match_data, lookahead);
1249 for (unsigned int i = 0; i < count; i++)
1252 if (!skippy_iter.next (&unsafe_to))
1254 *end_index = unsafe_to;
1255 return_trace (false);
1259 *end_index = skippy_iter.idx + 1;
1260 return_trace (true);
/* Methods of the LookupRecord table (struct header is above this extract).
 * A LookupRecord pairs a sequence position with a lookup to apply there. */
/* serialize: copy this record into the output, remapping lookupListIndex
 * through lookup_map (old lookup index -> subsetted lookup index). */
1267 bool serialize (hb_serialize_context_t *c,
1268 const hb_map_t *lookup_map) const
1270 TRACE_SERIALIZE (this);
1271 auto *out = c->embed (*this);
1272 if (unlikely (!out)) return_trace (false);
1274 return_trace (c->check_assign (out->lookupListIndex, lookup_map->get (lookupListIndex), HB_SERIALIZE_ERROR_INT_OVERFLOW));
/* sanitize: fixed-size struct, a plain bounds check suffices. */
1277 bool sanitize (hb_sanitize_context_t *c) const
1279 TRACE_SANITIZE (this);
1280 return_trace (c->check_struct (this));
1283 HBUINT16 sequenceIndex; /* Index into current glyph
1284 * sequence--first glyph = 0 */
1285 HBUINT16 lookupListIndex; /* Lookup to apply to that
1286 * position--zero--based */
1288 DEFINE_SIZE_STATIC (4);
/* Serialize lookupRecords into `c`, dropping records whose lookup was
 * removed by subsetting (not present in lookup_map).  Returns the number
 * of records actually written (callers check_assign it into lookupCount). */
1291 static unsigned serialize_lookuprecord_array (hb_serialize_context_t *c,
1292 const hb_array_t<const LookupRecord> lookupRecords,
1293 const hb_map_t *lookup_map)
1296 for (const LookupRecord& r : lookupRecords)
/* Skip records pointing at lookups that did not survive the subset. */
1298 if (!lookup_map->has (r.lookupListIndex))
1301 if (!r.serialize (c, lookup_map))
/* Which flavor of contextual matching a (Chain)Context subtable uses;
 * determines how the `value`/input entries are interpreted (glyph id,
 * class value, or coverage offset) in the closure code below. */
1309 enum ContextFormat { SimpleContext = 1, ClassBasedContext = 2, CoverageBasedContext = 3 };
/* Glyph-closure recursion for contextual lookups: for each LookupRecord,
 * compute the set of glyphs that can occupy its sequence position
 * (pos_glyphs), push that as the active-glyph set, and recurse into the
 * referenced lookup.  covered_seq_indicies tracks which sequence
 * positions have already been handled so repeated records for the same
 * position reuse the pushed set instead of recomputing it.
 * (Two parameters of the full signature fall in elided lines of this
 * extract: the first-position `value` and the classdef/coverage `data`.) */
1311 static void context_closure_recurse_lookups (hb_closure_context_t *c,
1312 unsigned inputCount, const HBUINT16 input[],
1313 unsigned lookupCount,
1314 const LookupRecord lookupRecord[] /* Array of LookupRecords--in design order */,
1316 ContextFormat context_format,
1318 intersected_glyphs_func_t intersected_glyphs_func)
1320 hb_set_t *covered_seq_indicies = hb_set_create ();
1321 for (unsigned int i = 0; i < lookupCount; i++)
1323 unsigned seqIndex = lookupRecord[i].sequenceIndex;
/* Malformed record pointing past the input sequence: ignore it. */
1324 if (seqIndex >= inputCount) continue;
1326 bool has_pos_glyphs = false;
1327 hb_set_t pos_glyphs;
/* First time we see this sequence position: build its glyph set. */
1329 if (hb_set_is_empty (covered_seq_indicies) || !hb_set_has (covered_seq_indicies, seqIndex))
1331 has_pos_glyphs = true;
/* Position 0 is matched by the subtable itself; how to expand it
 * depends on the context format. */
1334 switch (context_format) {
1335 case ContextFormat::SimpleContext:
1336 pos_glyphs.add (value);
1338 case ContextFormat::ClassBasedContext:
1339 intersected_glyphs_func (&c->parent_active_glyphs (), data, value, &pos_glyphs);
1341 case ContextFormat::CoverageBasedContext:
1342 pos_glyphs.set (c->parent_active_glyphs ());
/* Positions > 0 come from the input[] array (input[0] describes the
 * second glyph, hence the - 1). */
1348 const void *input_data = input;
1349 unsigned input_value = seqIndex - 1;
1350 if (context_format != ContextFormat::SimpleContext)
1353 input_value = input[seqIndex - 1];
1356 intersected_glyphs_func (c->glyphs, input_data, input_value, &pos_glyphs);
1360 covered_seq_indicies->add (seqIndex);
/* Push pos_glyphs as the active set for the recursion; if we reused a
 * previously covered position, push the full glyph set instead. */
1361 if (has_pos_glyphs) {
1362 c->push_cur_active_glyphs () = pos_glyphs;
1364 c->push_cur_active_glyphs ().set (*c->glyphs);
1367 unsigned endIndex = inputCount;
1368 if (context_format == ContextFormat::CoverageBasedContext)
1371 c->recurse (lookupRecord[i].lookupListIndex, covered_seq_indicies, seqIndex, endIndex);
1373 c->pop_cur_done_glyphs ();
1376 hb_set_destroy (covered_seq_indicies);
/* Recurse into every lookup referenced by lookupRecord[]; used by the
 * dispatch contexts (e.g. closure-lookups, collect-glyphs) that only
 * need to visit referenced lookups, not track per-position glyph sets. */
1379 template <typename context_t>
1380 static inline void recurse_lookups (context_t *c,
1381 unsigned int lookupCount,
1382 const LookupRecord lookupRecord[] /* Array of LookupRecords--in design order */)
1384 for (unsigned int i = 0; i < lookupCount; i++)
1385 c->recurse (lookupRecord[i].lookupListIndex);
/* Apply each LookupRecord to the already-matched input sequence.
 *
 * count/match_positions describe the matched glyphs (first included);
 * match_end is one past the match in the buffer.  For every record we
 * move the buffer to the recorded position, recurse into the referenced
 * lookup, and then repair match_positions[] and `end` for any glyphs the
 * recursed lookup added or removed.  The repair heuristics are spelled
 * out in the long comment below. */
1388 static inline void apply_lookup (hb_ot_apply_context_t *c,
1389 unsigned int count, /* Including the first glyph */
1390 unsigned int match_positions[HB_MAX_CONTEXT_LENGTH], /* Including the first glyph */
1391 unsigned int lookupCount,
1392 const LookupRecord lookupRecord[], /* Array of LookupRecords--in design order */
1393 unsigned int match_end)
1395 hb_buffer_t *buffer = c->buffer;
1398 /* All positions are distance from beginning of *output* buffer.
/* Rebase match_end and match_positions[] from input-buffer indices to
 * output-buffer indices (they can differ once glyphs were replaced). */
1401 unsigned int bl = buffer->backtrack_len ();
1402 end = bl + match_end - buffer->idx;
1404 int delta = bl - buffer->idx;
1405 /* Convert positions to new indexing. */
1406 for (unsigned int j = 0; j < count; j++)
1407 match_positions[j] += delta;
1410 for (unsigned int i = 0; i < lookupCount && buffer->successful; i++)
1412 unsigned int idx = lookupRecord[i].sequenceIndex;
1416 /* Don't recurse to ourself at same position.
1417 * Note that this test is too naive, it doesn't catch longer loops. */
1418 if (unlikely (idx == 0 && lookupRecord[i].lookupListIndex == c->lookup_index))
1421 if (unlikely (!buffer->move_to (match_positions[idx])))
/* Guard against malicious fonts burning unbounded work. */
1424 if (unlikely (buffer->max_ops <= 0))
/* Detect length change caused by the recursed lookup. */
1427 unsigned int orig_len = buffer->backtrack_len () + buffer->lookahead_len ();
1428 if (!c->recurse (lookupRecord[i].lookupListIndex))
1431 unsigned int new_len = buffer->backtrack_len () + buffer->lookahead_len ();
1432 int delta = new_len - orig_len;
1437 /* Recursed lookup changed buffer len. Adjust.
1441 * Right now, if buffer length increased by n, we assume n new glyphs
1442 * were added right after the current position, and if buffer length
1443 * was decreased by n, we assume n match positions after the current
1444 * one where removed. The former (buffer length increased) case is
1445 * fine, but the decrease case can be improved in at least two ways,
1446 * both of which are significant:
1448 * - If recursed-to lookup is MultipleSubst and buffer length
1449 * decreased, then it's current match position that was deleted,
1450 * NOT the one after it.
1452 * - If buffer length was decreased by n, it does not necessarily
1453 * mean that n match positions where removed, as there might
1454 * have been marks and default-ignorables in the sequence. We
1455 * should instead drop match positions between current-position
1456 * and current-position + n instead. Though, am not sure which
1457 * one is better. Both cases have valid uses. Sigh.
1459 * It should be possible to construct tests for both of these cases.
1463 if (end <= int (match_positions[idx]))
1465 /* End might end up being smaller than match_positions[idx] if the recursed
1466 * lookup ended up removing many items, more than we have had matched.
1467 * Just never rewind end back and get out of here.
1468 * https://bugs.chromium.org/p/chromium/issues/detail?id=659496 */
1469 end = match_positions[idx];
1470 /* There can't be any further changes. */
1474 unsigned int next = idx + 1; /* next now is the position after the recursed lookup. */
/* Growth case: shift the remaining positions right, but never past
 * the HB_MAX_CONTEXT_LENGTH capacity of match_positions[]. */
1478 if (unlikely (delta + count > HB_MAX_CONTEXT_LENGTH))
1483 /* NOTE: delta is negative. */
/* Shrink case: don't let the memmove walk off the front of the
 * remaining positions. */
1484 delta = hb_max (delta, (int) next - (int) count);
1489 memmove (match_positions + next + delta, match_positions + next,
1490 (count - next) * sizeof (match_positions[0]));
1494 /* Fill in new entries. */
1495 for (unsigned int j = idx + 1; j < next; j++)
1496 match_positions[j] = match_positions[j - 1] + 1;
1498 /* And fixup the rest. */
1499 for (; next < count; next++)
1500 match_positions[next] += delta;
1503 (void) buffer->move_to (end);
1508 /* Contextual lookups */
/* Bundles the per-format function pointers plus their opaque data so the
 * shared context_* helpers below stay format-agnostic. */
1510 struct ContextClosureLookupContext
1512 ContextClosureFuncs funcs;
1513 ContextFormat context_format;
1514 const void *intersects_data;
/* Same idea, for collect_glyphs traversal. */
1517 struct ContextCollectGlyphsLookupContext
1519 ContextCollectGlyphsFuncs funcs;
1520 const void *collect_data;
/* Same idea, for would_apply/apply matching. */
1523 struct ContextApplyLookupContext
1525 ContextApplyFuncs funcs;
1526 const void *match_data;
/* True if every input position (excluding the first, which the caller
 * has already tested via coverage/class) can be satisfied by `glyphs`. */
1529 static inline bool context_intersects (const hb_set_t *glyphs,
1530 unsigned int inputCount, /* Including the first glyph (not matched) */
1531 const HBUINT16 input[], /* Array of input values--start with second glyph */
1532 ContextClosureLookupContext &lookup_context)
1534 return array_is_subset_of (glyphs,
1535 inputCount ? inputCount - 1 : 0, input,
1536 lookup_context.funcs.intersects, lookup_context.intersects_data);
/* Glyph closure for one context rule: if the whole input sequence can be
 * matched against the current glyph set, recurse into its LookupRecords
 * via context_closure_recurse_lookups() above. */
1539 static inline void context_closure_lookup (hb_closure_context_t *c,
1540 unsigned int inputCount, /* Including the first glyph (not matched) */
1541 const HBUINT16 input[], /* Array of input values--start with second glyph */
1542 unsigned int lookupCount,
1543 const LookupRecord lookupRecord[],
1544 unsigned value, /* Index of first glyph in Coverage or Class value in ClassDef table */
1545 ContextClosureLookupContext &lookup_context)
1547 if (context_intersects (c->glyphs,
1550 context_closure_recurse_lookups (c,
1552 lookupCount, lookupRecord,
1554 lookup_context.context_format,
1555 lookup_context.intersects_data,
1556 lookup_context.funcs.intersected_glyphs);
/* collect_glyphs for one context rule: add all possible input glyphs to
 * c->input, then visit the referenced lookups. */
1559 static inline void context_collect_glyphs_lookup (hb_collect_glyphs_context_t *c,
1560 unsigned int inputCount, /* Including the first glyph (not matched) */
1561 const HBUINT16 input[], /* Array of input values--start with second glyph */
1562 unsigned int lookupCount,
1563 const LookupRecord lookupRecord[],
1564 ContextCollectGlyphsLookupContext &lookup_context)
1566 collect_array (c, c->input,
1567 inputCount ? inputCount - 1 : 0, input,
1568 lookup_context.funcs.collect, lookup_context.collect_data);
1570 lookupCount, lookupRecord);
/* would_apply for one context rule: only the input match matters; the
 * LookupRecords are irrelevant (and hence HB_UNUSED) for this query. */
1573 static inline bool context_would_apply_lookup (hb_would_apply_context_t *c,
1574 unsigned int inputCount, /* Including the first glyph (not matched) */
1575 const HBUINT16 input[], /* Array of input values--start with second glyph */
1576 unsigned int lookupCount HB_UNUSED,
1577 const LookupRecord lookupRecord[] HB_UNUSED,
1578 ContextApplyLookupContext &lookup_context)
1580 return would_match_input (c,
1582 lookup_context.funcs.match, lookup_context.match_data);
/* apply for one context rule: match the input sequence, then hand the
 * matched positions to apply_lookup() for the nested substitutions.
 * Marks unsafe-to-break on success and unsafe-to-concat on failure so
 * line-breaking/shaping segmentation stays correct either way. */
1584 static inline bool context_apply_lookup (hb_ot_apply_context_t *c,
1585 unsigned int inputCount, /* Including the first glyph (not matched) */
1586 const HBUINT16 input[], /* Array of input values--start with second glyph */
1587 unsigned int lookupCount,
1588 const LookupRecord lookupRecord[],
1589 ContextApplyLookupContext &lookup_context)
1591 unsigned match_end = 0;
1592 unsigned match_positions[HB_MAX_CONTEXT_LENGTH];
1595 lookup_context.funcs.match, lookup_context.match_data,
1596 &match_end, match_positions))
1598 c->buffer->unsafe_to_break (c->buffer->idx, match_end);
1600 inputCount, match_positions,
1601 lookupCount, lookupRecord,
1607 c->buffer->unsafe_to_concat (c->buffer->idx, match_end);
/* Methods of the Context Rule table (struct header is above this extract).
 * Layout: inputCount, lookupCount, inputZ[inputCount - 1] glyph/class
 * values, then lookupCount LookupRecords immediately after (reached via
 * StructAfter, see lookupRecordX comment at the bottom). */
1614 bool intersects (const hb_set_t *glyphs, ContextClosureLookupContext &lookup_context) const
1616 return context_intersects (glyphs,
1617 inputCount, inputZ.arrayZ,
/* Glyph closure: delegate to context_closure_lookup() with this rule's
 * input array and trailing LookupRecords. */
1621 void closure (hb_closure_context_t *c, unsigned value, ContextClosureLookupContext &lookup_context) const
1623 if (unlikely (c->lookup_limit_exceeded ())) return;
1625 const UnsizedArrayOf<LookupRecord> &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
1626 (inputZ.as_array ((inputCount ? inputCount - 1 : 0)));
1627 context_closure_lookup (c,
1628 inputCount, inputZ.arrayZ,
1629 lookupCount, lookupRecord.arrayZ,
1630 value, lookup_context);
/* Visit referenced lookups, but only if this rule can match at all. */
1633 void closure_lookups (hb_closure_lookups_context_t *c,
1634 ContextClosureLookupContext &lookup_context) const
1636 if (unlikely (c->lookup_limit_exceeded ())) return;
1637 if (!intersects (c->glyphs, lookup_context)) return;
1639 const UnsizedArrayOf<LookupRecord> &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
1640 (inputZ.as_array (inputCount ? inputCount - 1 : 0));
1641 recurse_lookups (c, lookupCount, lookupRecord.arrayZ);
1644 void collect_glyphs (hb_collect_glyphs_context_t *c,
1645 ContextCollectGlyphsLookupContext &lookup_context) const
1647 const UnsizedArrayOf<LookupRecord> &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
1648 (inputZ.as_array (inputCount ? inputCount - 1 : 0));
1649 context_collect_glyphs_lookup (c,
1650 inputCount, inputZ.arrayZ,
1651 lookupCount, lookupRecord.arrayZ,
1655 bool would_apply (hb_would_apply_context_t *c,
1656 ContextApplyLookupContext &lookup_context) const
1658 const UnsizedArrayOf<LookupRecord> &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
1659 (inputZ.as_array (inputCount ? inputCount - 1 : 0));
1660 return context_would_apply_lookup (c,
1661 inputCount, inputZ.arrayZ,
1662 lookupCount, lookupRecord.arrayZ,
1666 bool apply (hb_ot_apply_context_t *c,
1667 ContextApplyLookupContext &lookup_context) const
1670 const UnsizedArrayOf<LookupRecord> &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
1671 (inputZ.as_array (inputCount ? inputCount - 1 : 0));
1672 return_trace (context_apply_lookup (c, inputCount, inputZ.arrayZ, lookupCount, lookupRecord.arrayZ, lookup_context));
/* Subset-serialize this rule: remap input values through input_mapping
 * (glyph ids or class values, per caller) and the LookupRecords through
 * lookup_map; lookupCount is rewritten to the surviving record count. */
1675 bool serialize (hb_serialize_context_t *c,
1676 const hb_map_t *input_mapping, /* old->new glyphid or class mapping */
1677 const hb_map_t *lookup_map) const
1679 TRACE_SERIALIZE (this);
1680 auto *out = c->start_embed (this);
1681 if (unlikely (!c->extend_min (out))) return_trace (false);
1683 out->inputCount = inputCount;
1684 const hb_array_t<const HBUINT16> input = inputZ.as_array (inputCount - 1);
1685 for (const auto org : input)
1688 d = input_mapping->get (org);
1692 const UnsizedArrayOf<LookupRecord> &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
1693 (inputZ.as_array ((inputCount ? inputCount - 1 : 0)));
1695 unsigned count = serialize_lookuprecord_array (c, lookupRecord.as_array (lookupCount), lookup_map);
1696 return_trace (c->check_assign (out->lookupCount, count, HB_SERIALIZE_ERROR_INT_OVERFLOW));
/* subset: keep the rule only if every input value survives in `mapping`
 * (glyph_map by default, klass_map for class-based contexts). */
1699 bool subset (hb_subset_context_t *c,
1700 const hb_map_t *lookup_map,
1701 const hb_map_t *klass_map = nullptr) const
1703 TRACE_SUBSET (this);
1704 if (unlikely (!inputCount)) return_trace (false);
1705 const hb_array_t<const HBUINT16> input = inputZ.as_array (inputCount - 1);
1707 const hb_map_t *mapping = klass_map == nullptr ? c->plan->glyph_map : klass_map;
1708 if (!hb_all (input, mapping)) return_trace (false);
1709 return_trace (serialize (c->serializer, mapping, lookup_map));
/* sanitize: variable-size table; range-check inputZ plus the trailing
 * LookupRecord array in one go. */
1713 bool sanitize (hb_sanitize_context_t *c) const
1715 TRACE_SANITIZE (this);
1716 return_trace (inputCount.sanitize (c) &&
1717 lookupCount.sanitize (c) &&
1718 c->check_range (inputZ.arrayZ,
1719 inputZ.item_size * (inputCount ? inputCount - 1 : 0) +
1720 LookupRecord::static_size * lookupCount));
1724 HBUINT16 inputCount; /* Total number of glyphs in input
1725 * glyph sequence--includes the first
1727 HBUINT16 lookupCount; /* Number of LookupRecords */
1728 UnsizedArrayOf<HBUINT16>
1729 inputZ; /* Array of match inputs--start with
1731 /*UnsizedArrayOf<LookupRecord>
1732 lookupRecordX;*/ /* Array of LookupRecords--in
1735 DEFINE_SIZE_ARRAY (4, inputZ);
/* Methods of the RuleSet table (struct header is above this extract):
 * an array of offsets to Rule tables, tried in preference order.  Most
 * methods just fan out to every Rule via the iterator pipeline. */
1740 bool intersects (const hb_set_t *glyphs,
1741 ContextClosureLookupContext &lookup_context) const
1745 | hb_map (hb_add (this))
1746 | hb_map ([&] (const Rule &_) { return _.intersects (glyphs, lookup_context); })
1751 void closure (hb_closure_context_t *c, unsigned value,
1752 ContextClosureLookupContext &lookup_context) const
1754 if (unlikely (c->lookup_limit_exceeded ())) return;
1758 | hb_map (hb_add (this))
1759 | hb_apply ([&] (const Rule &_) { _.closure (c, value, lookup_context); })
1763 void closure_lookups (hb_closure_lookups_context_t *c,
1764 ContextClosureLookupContext &lookup_context) const
1766 if (unlikely (c->lookup_limit_exceeded ())) return;
1768 | hb_map (hb_add (this))
1769 | hb_apply ([&] (const Rule &_) { _.closure_lookups (c, lookup_context); })
1773 void collect_glyphs (hb_collect_glyphs_context_t *c,
1774 ContextCollectGlyphsLookupContext &lookup_context) const
1778 | hb_map (hb_add (this))
1779 | hb_apply ([&] (const Rule &_) { _.collect_glyphs (c, lookup_context); })
1783 bool would_apply (hb_would_apply_context_t *c,
1784 ContextApplyLookupContext &lookup_context) const
1788 | hb_map (hb_add (this))
1789 | hb_map ([&] (const Rule &_) { return _.would_apply (c, lookup_context); })
/* apply: rules are tried in order; the first one that applies wins. */
1794 bool apply (hb_ot_apply_context_t *c,
1795 ContextApplyLookupContext &lookup_context) const
1800 | hb_map (hb_add (this))
1801 | hb_map ([&] (const Rule &_) { return _.apply (c, lookup_context); })
/* subset: serialize each surviving rule; a rule that fails to subset is
 * dropped by reverting to o_snap.  Whole set fails if no rule survives. */
1807 bool subset (hb_subset_context_t *c,
1808 const hb_map_t *lookup_map,
1809 const hb_map_t *klass_map = nullptr) const
1811 TRACE_SUBSET (this);
1813 auto snap = c->serializer->snapshot ();
1814 auto *out = c->serializer->start_embed (*this);
1815 if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
1817 for (const Offset16To<Rule>& _ : rule)
1820 auto o_snap = c->serializer->snapshot ();
1821 auto *o = out->rule.serialize_append (c->serializer);
1822 if (unlikely (!o)) continue;
1824 if (!o->serialize_subset (c, _, this, lookup_map, klass_map))
1827 c->serializer->revert (o_snap);
1831 bool ret = bool (out->rule);
1832 if (!ret) c->serializer->revert (snap);
1837 bool sanitize (hb_sanitize_context_t *c) const
1839 TRACE_SANITIZE (this);
1840 return_trace (rule.sanitize (c, this));
1844 Array16OfOffset16To<Rule>
1845 rule; /* Array of Rule tables
1846 * ordered by preference */
1848 DEFINE_SIZE_ARRAY (2, rule);
/* Sequence Context Format 1: simple glyph contexts.  A Coverage table
 * selects the first glyph; ruleSet[coverage-index] holds the Rules whose
 * inputs are literal glyph ids. */
1852 struct ContextFormat1
1854 bool intersects (const hb_set_t *glyphs) const
1856 struct ContextClosureLookupContext lookup_context = {
1857 {intersects_glyph, intersected_glyph},
1858 ContextFormat::SimpleContext,
/* Any covered first-glyph whose rule set intersects => subtable does. */
1863 + hb_zip (this+coverage, ruleSet)
1864 | hb_filter (*glyphs, hb_first)
1865 | hb_map (hb_second)
1866 | hb_map (hb_add (this))
1867 | hb_map ([&] (const RuleSet &_) { return _.intersects (glyphs, lookup_context); })
/* Contextual lookups can substitute via nested lookups => not 1:1. */
1872 bool may_have_non_1to1 () const
1875 void closure (hb_closure_context_t *c) const
/* Scope the active-glyph set to glyphs actually covered here before
 * recursing; popped again at the bottom. */
1877 hb_set_t* cur_active_glyphs = &c->push_cur_active_glyphs ();
1878 get_coverage ().intersected_coverage_glyphs (&c->previous_parent_active_glyphs (),
1881 struct ContextClosureLookupContext lookup_context = {
1882 {intersects_glyph, intersected_glyph},
1883 ContextFormat::SimpleContext,
1887 + hb_zip (this+coverage, hb_range ((unsigned) ruleSet.len))
1888 | hb_filter ([&] (hb_codepoint_t _) {
1889 return c->previous_parent_active_glyphs ().has (_);
1891 | hb_map ([&](const hb_pair_t<hb_codepoint_t, unsigned> _) { return hb_pair_t<unsigned, const RuleSet&> (_.first, this+ruleSet[_.second]); })
1892 | hb_apply ([&] (const hb_pair_t<unsigned, const RuleSet&>& _) { _.second.closure (c, _.first, lookup_context); })
1895 c->pop_cur_done_glyphs ();
1898 void closure_lookups (hb_closure_lookups_context_t *c) const
1900 struct ContextClosureLookupContext lookup_context = {
1901 {intersects_glyph, intersected_glyph},
1902 ContextFormat::SimpleContext,
1906 + hb_zip (this+coverage, ruleSet)
1907 | hb_filter (*c->glyphs, hb_first)
1908 | hb_map (hb_second)
1909 | hb_map (hb_add (this))
1910 | hb_apply ([&] (const RuleSet &_) { _.closure_lookups (c, lookup_context); })
/* Context subtables carry no positioning values of their own. */
1914 void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {}
1916 void collect_glyphs (hb_collect_glyphs_context_t *c) const
1918 (this+coverage).collect_coverage (c->input);
1920 struct ContextCollectGlyphsLookupContext lookup_context = {
1926 | hb_map (hb_add (this))
1927 | hb_apply ([&] (const RuleSet &_) { _.collect_glyphs (c, lookup_context); })
1931 bool would_apply (hb_would_apply_context_t *c) const
1933 const RuleSet &rule_set = this+ruleSet[(this+coverage).get_coverage (c->glyphs[0])];
1934 struct ContextApplyLookupContext lookup_context = {
1938 return rule_set.would_apply (c, lookup_context);
1941 const Coverage &get_coverage () const { return this+coverage; }
1943 bool apply (hb_ot_apply_context_t *c) const
/* Current glyph must be covered; its coverage index picks the RuleSet. */
1946 unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
1947 if (likely (index == NOT_COVERED))
1948 return_trace (false);
1950 const RuleSet &rule_set = this+ruleSet[index];
1951 struct ContextApplyLookupContext lookup_context = {
1955 return_trace (rule_set.apply (c, lookup_context));
/* subset: keep only (glyph, ruleSet) pairs whose glyph is retained and
 * whose rule set subsets successfully; rebuild coverage from survivors. */
1958 bool subset (hb_subset_context_t *c) const
1960 TRACE_SUBSET (this);
1961 const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
1962 const hb_map_t &glyph_map = *c->plan->glyph_map;
1964 auto *out = c->serializer->start_embed (*this);
1965 if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
1966 out->format = format;
1968 const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? c->plan->gsub_lookups : c->plan->gpos_lookups;
1969 hb_sorted_vector_t<hb_codepoint_t> new_coverage;
1970 + hb_zip (this+coverage, ruleSet)
1971 | hb_filter (glyphset, hb_first)
1972 | hb_filter (subset_offset_array (c, out->ruleSet, this, lookup_map), hb_second)
1974 | hb_map (glyph_map)
1975 | hb_sink (new_coverage)
1978 out->coverage.serialize_serialize (c->serializer, new_coverage.iter ())
1979 return_trace (bool (new_coverage));
1982 bool sanitize (hb_sanitize_context_t *c) const
1984 TRACE_SANITIZE (this);
1985 return_trace (coverage.sanitize (c, this) && ruleSet.sanitize (c, this));
1989 HBUINT16 format; /* Format identifier--format = 1 */
1990 Offset16To<Coverage>
1991 coverage; /* Offset to Coverage table--from
1992 * beginning of table */
1993 Array16OfOffset16To<RuleSet>
1994 ruleSet; /* Array of RuleSet tables
1995 * ordered by Coverage Index */
1997 DEFINE_SIZE_ARRAY (6, ruleSet);
/* Sequence Context Format 2: class-based contexts.  Coverage gates the
 * first glyph; a ClassDef maps glyphs to classes, and ruleSet is indexed
 * by the first glyph's class.  Rule inputs are class values. */
2001 struct ContextFormat2
2003 bool intersects (const hb_set_t *glyphs) const
2005 if (!(this+coverage).intersects (glyphs))
2008 const ClassDef &class_def = this+classDef;
2010 struct ContextClosureLookupContext lookup_context = {
2011 {intersects_class, intersected_class_glyphs},
2012 ContextFormat::ClassBasedContext,
/* Only classes that actually occur among covered, retained glyphs can
 * start a match; compute that set first. */
2016 hb_set_t retained_coverage_glyphs;
2017 (this+coverage).intersected_coverage_glyphs (glyphs, &retained_coverage_glyphs);
2019 hb_set_t coverage_glyph_classes;
2020 class_def.intersected_classes (&retained_coverage_glyphs, &coverage_glyph_classes);
2025 | hb_map (hb_add (this))
2027 | hb_map ([&] (const hb_pair_t<unsigned, const RuleSet &> p)
2028 { return class_def.intersects_class (glyphs, p.first) &&
2029 coverage_glyph_classes.has (p.first) &&
2030 p.second.intersects (glyphs, lookup_context); })
/* Nested lookups may change glyph counts => not 1:1. */
2035 bool may_have_non_1to1 () const
2038 void closure (hb_closure_context_t *c) const
2040 if (!(this+coverage).intersects (c->glyphs))
/* Scope active glyphs to those covered here; popped at the bottom. */
2043 hb_set_t* cur_active_glyphs = &c->push_cur_active_glyphs ();
2044 get_coverage ().intersected_coverage_glyphs (&c->previous_parent_active_glyphs (),
2047 const ClassDef &class_def = this+classDef;
2049 struct ContextClosureLookupContext lookup_context = {
2050 {intersects_class, intersected_class_glyphs},
2051 ContextFormat::ClassBasedContext,
2055 + hb_enumerate (ruleSet)
2056 | hb_filter ([&] (unsigned _)
2057 { return class_def.intersects_class (&c->parent_active_glyphs (), _); },
2059 | hb_apply ([&] (const hb_pair_t<unsigned, const Offset16To<RuleSet>&> _)
2061 const RuleSet& rule_set = this+_.second;
2062 rule_set.closure (c, _.first, lookup_context);
2066 c->pop_cur_done_glyphs ();
2069 void closure_lookups (hb_closure_lookups_context_t *c) const
2071 if (!(this+coverage).intersects (c->glyphs))
2074 const ClassDef &class_def = this+classDef;
2076 struct ContextClosureLookupContext lookup_context = {
2077 {intersects_class, intersected_class_glyphs},
2078 ContextFormat::ClassBasedContext,
2083 | hb_map (hb_add (this))
2085 | hb_filter ([&] (const hb_pair_t<unsigned, const RuleSet &> p)
2086 { return class_def.intersects_class (c->glyphs, p.first); })
2087 | hb_map (hb_second)
2088 | hb_apply ([&] (const RuleSet & _)
2089 { _.closure_lookups (c, lookup_context); });
/* No positioning values stored directly in context subtables. */
2092 void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {}
2094 void collect_glyphs (hb_collect_glyphs_context_t *c) const
2096 (this+coverage).collect_coverage (c->input);
2098 const ClassDef &class_def = this+classDef;
2099 struct ContextCollectGlyphsLookupContext lookup_context = {
2105 | hb_map (hb_add (this))
2106 | hb_apply ([&] (const RuleSet &_) { _.collect_glyphs (c, lookup_context); })
2110 bool would_apply (hb_would_apply_context_t *c) const
2112 const ClassDef &class_def = this+classDef;
2113 unsigned int index = class_def.get_class (c->glyphs[0]);
2114 const RuleSet &rule_set = this+ruleSet[index];
2115 struct ContextApplyLookupContext lookup_context = {
2119 return rule_set.would_apply (c, lookup_context);
2122 const Coverage &get_coverage () const { return this+coverage; }
2124 bool apply (hb_ot_apply_context_t *c) const
/* Coverage gates entry; the glyph's *class* then picks the RuleSet. */
2127 unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
2128 if (likely (index == NOT_COVERED)) return_trace (false);
2130 const ClassDef &class_def = this+classDef;
2131 index = class_def.get_class (c->buffer->cur().codepoint);
2132 const RuleSet &rule_set = this+ruleSet[index];
2133 struct ContextApplyLookupContext lookup_context = {
2137 return_trace (rule_set.apply (c, lookup_context));
/* subset: remap classes via klass_map, keep rule sets whose class both
 * survives and can start a match, then prune empty trailing sets so the
 * array stays dense. */
2140 bool subset (hb_subset_context_t *c) const
2142 TRACE_SUBSET (this);
2143 auto *out = c->serializer->start_embed (*this);
2144 if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
2145 out->format = format;
2146 if (unlikely (!out->coverage.serialize_subset (c, coverage, this)))
2147 return_trace (false);
2150 out->classDef.serialize_subset (c, classDef, this, &klass_map);
2152 const hb_set_t* glyphset = c->plan->glyphset_gsub ();
2153 hb_set_t retained_coverage_glyphs;
2154 (this+coverage).intersected_coverage_glyphs (glyphset, &retained_coverage_glyphs);
2156 hb_set_t coverage_glyph_classes;
2157 (this+classDef).intersected_classes (&retained_coverage_glyphs, &coverage_glyph_classes);
2159 const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? c->plan->gsub_lookups : c->plan->gpos_lookups;
2161 int non_zero_index = -1, index = 0;
2162 for (const auto& _ : + hb_enumerate (ruleSet)
2163 | hb_filter (klass_map, hb_first))
2165 auto *o = out->ruleSet.serialize_append (c->serializer);
2172 if (coverage_glyph_classes.has (_.first) &&
2173 o->serialize_subset (c, _.second, this, lookup_map, &klass_map))
2174 non_zero_index = index;
2179 if (!ret || non_zero_index == -1) return_trace (false);
2181 //prune empty trailing ruleSets
2183 while (index > non_zero_index)
2185 out->ruleSet.pop ();
2189 return_trace (bool (out->ruleSet));
2192 bool sanitize (hb_sanitize_context_t *c) const
2194 TRACE_SANITIZE (this);
2195 return_trace (coverage.sanitize (c, this) && classDef.sanitize (c, this) && ruleSet.sanitize (c, this));
2199 HBUINT16 format; /* Format identifier--format = 2 */
2200 Offset16To<Coverage>
2201 coverage; /* Offset to Coverage table--from
2202 * beginning of table */
2203 Offset16To<ClassDef>
2204 classDef; /* Offset to glyph ClassDef table--from
2205 * beginning of table */
2206 Array16OfOffset16To<RuleSet>
2207 ruleSet; /* Array of RuleSet tables
2208 * ordered by class */
2210 DEFINE_SIZE_ARRAY (8, ruleSet);
/* ContextFormat3: coverage-based contextual lookup subtable.
 * The input sequence is described by one Coverage table per position
 * (coverageZ), followed in memory by an unsized array of LookupRecords.
 * NOTE(review): this chunk is a lossy extraction — some original lines
 * (braces, early returns) are elided; comments describe visible lines only. */
2214 struct ContextFormat3
/* True if every position's coverage can match inside `glyphs`.
 * Fails fast when the first coverage does not intersect. */
2216 bool intersects (const hb_set_t *glyphs) const
2218 if (!(this+coverageZ[0]).intersects (glyphs))
2221 struct ContextClosureLookupContext lookup_context = {
2222 {intersects_coverage, intersected_coverage_glyphs},
2223 ContextFormat::CoverageBasedContext,
/* coverageZ.arrayZ + 1: the first coverage was already checked above. */
2226 return context_intersects (glyphs,
2227 glyphCount, (const HBUINT16 *) (coverageZ.arrayZ + 1),
/* Contextual subtables recurse into other lookups, so output need not
 * be one-to-one with input. */
2231 bool may_have_non_1to1 () const
/* Glyph closure: restrict the active set to glyphs the first coverage
 * accepts, then recurse through the LookupRecords. */
2234 void closure (hb_closure_context_t *c) const
2236 if (!(this+coverageZ[0]).intersects (c->glyphs))
2239 hb_set_t* cur_active_glyphs = &c->push_cur_active_glyphs ();
2240 get_coverage ().intersected_coverage_glyphs (&c->previous_parent_active_glyphs (),
/* LookupRecords live immediately after the glyphCount coverage offsets. */
2244 const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
2245 struct ContextClosureLookupContext lookup_context = {
2246 {intersects_coverage, intersected_coverage_glyphs},
2247 ContextFormat::CoverageBasedContext,
2250 context_closure_lookup (c,
2251 glyphCount, (const HBUINT16 *) (coverageZ.arrayZ + 1),
2252 lookupCount, lookupRecord,
2255 c->pop_cur_done_glyphs ();
/* Mark recursed lookups reachable — only if this subtable can match. */
2258 void closure_lookups (hb_closure_lookups_context_t *c) const
2260 if (!intersects (c->glyphs))
2262 const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
2263 recurse_lookups (c, lookupCount, lookupRecord);
/* No device/variation data stored directly in a context subtable. */
2266 void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {}
/* Collect all glyphs any position of this context can reference. */
2268 void collect_glyphs (hb_collect_glyphs_context_t *c) const
2270 (this+coverageZ[0]).collect_coverage (c->input);
2272 const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
2273 struct ContextCollectGlyphsLookupContext lookup_context = {
2278 context_collect_glyphs_lookup (c,
2279 glyphCount, (const HBUINT16 *) (coverageZ.arrayZ + 1),
2280 lookupCount, lookupRecord,
/* Cheap applicability probe used by e.g. hb_ot_layout_lookup_would_substitute. */
2284 bool would_apply (hb_would_apply_context_t *c) const
2286 const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
2287 struct ContextApplyLookupContext lookup_context = {
2291 return context_would_apply_lookup (c,
2292 glyphCount, (const HBUINT16 *) (coverageZ.arrayZ + 1),
2293 lookupCount, lookupRecord,
/* The first position's coverage doubles as the subtable coverage. */
2297 const Coverage &get_coverage () const { return this+coverageZ[0]; }
/* Apply at the current buffer position; current glyph must be in the
 * first coverage, then the full context is matched and lookups run. */
2299 bool apply (hb_ot_apply_context_t *c) const
2302 unsigned int index = (this+coverageZ[0]).get_coverage (c->buffer->cur().codepoint);
2303 if (likely (index == NOT_COVERED)) return_trace (false);
2305 const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
2306 struct ContextApplyLookupContext lookup_context = {
2310 return_trace (context_apply_lookup (c, glyphCount, (const HBUINT16 *) (coverageZ.arrayZ + 1), lookupCount, lookupRecord, lookup_context));
/* Subsetter: copy header fields, subset each Coverage in place, then
 * remap and re-serialize the LookupRecord array through the plan's
 * GSUB/GPOS lookup map. */
2313 bool subset (hb_subset_context_t *c) const
2315 TRACE_SUBSET (this);
2316 auto *out = c->serializer->start_embed (this);
2317 if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
2319 out->format = format;
2320 out->glyphCount = glyphCount;
2322 auto coverages = coverageZ.as_array (glyphCount);
2324 for (const Offset16To<Coverage>& offset : coverages)
2326 /* TODO(subset) This looks like should not be necessary to write this way. */
2327 auto *o = c->serializer->allocate_size<Offset16To<Coverage>> (Offset16To<Coverage>::static_size);
2328 if (unlikely (!o)) return_trace (false);
2329 if (!o->serialize_subset (c, offset, this)) return_trace (false);
2332 const UnsizedArrayOf<LookupRecord>& lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>> (coverageZ.as_array (glyphCount));
2333 const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? c->plan->gsub_lookups : c->plan->gpos_lookups;
2336 unsigned count = serialize_lookuprecord_array (c->serializer, lookupRecord.as_array (lookupCount), lookup_map);
2337 return_trace (c->serializer->check_assign (out->lookupCount, count, HB_SERIALIZE_ERROR_INT_OVERFLOW));
/* Sanitize: glyphCount must be non-zero (coverageZ[0] is accessed
 * unconditionally elsewhere); then bounds-check the coverage offsets
 * and the trailing LookupRecord array. */
2340 bool sanitize (hb_sanitize_context_t *c) const
2342 TRACE_SANITIZE (this);
2343 if (!c->check_struct (this)) return_trace (false);
2344 unsigned int count = glyphCount;
2345 if (!count) return_trace (false); /* We want to access coverageZ[0] freely. */
2346 if (!c->check_array (coverageZ.arrayZ, count)) return_trace (false);
2347 for (unsigned int i = 0; i < count; i++)
2348 if (!coverageZ[i].sanitize (c, this)) return_trace (false);
2349 const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
2350 return_trace (c->check_array (lookupRecord, lookupCount));
2354 HBUINT16 format; /* Format identifier--format = 3 */
2355 HBUINT16 glyphCount; /* Number of glyphs in the input glyph
2357 HBUINT16 lookupCount; /* Number of LookupRecords */
2358 UnsizedArrayOf<Offset16To<Coverage>>
2359 coverageZ; /* Array of offsets to Coverage
2360 * table in glyph sequence order */
2361 /*UnsizedArrayOf<LookupRecord>
2362 lookupRecordX;*/ /* Array of LookupRecords--in
2365 DEFINE_SIZE_ARRAY (6, coverageZ);
/* Dispatcher for the Context lookup subtable (enclosing struct's
 * declaration lies outside this chunk): validates that `format` is
 * readable, then routes to the matching format struct. */
2370 template <typename context_t, typename ...Ts>
2371 typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
2373 TRACE_DISPATCH (this, u.format);
2374 if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
2376 case 1: return_trace (c->dispatch (u.format1, std::forward<Ts> (ds)...));
2377 case 2: return_trace (c->dispatch (u.format2, std::forward<Ts> (ds)...));
2378 case 3: return_trace (c->dispatch (u.format3, std::forward<Ts> (ds)...));
/* Unknown formats are handled gracefully, not treated as corruption. */
2379 default:return_trace (c->default_return_value ());
/* Union of supported formats; `format` aliases the leading HBUINT16
 * shared by every member. */
2385 HBUINT16 format; /* Format identifier */
2386 ContextFormat1 format1;
2387 ContextFormat2 format2;
2388 ContextFormat3 format3;
2393 /* Chaining Contextual lookups */
/* Closure-phase parameters for chain-contextual matching: the
 * intersect/intersected-glyphs callbacks plus per-sequence data. */
2395 struct ChainContextClosureLookupContext
2397 ContextClosureFuncs funcs;
2398 ContextFormat context_format;
/* [0]=backtrack, [1]=input, [2]=lookahead */
2399 const void *intersects_data[3];
/* Glyph-collection parameters for chain-contextual matching. */
2402 struct ChainContextCollectGlyphsLookupContext
2404 ContextCollectGlyphsFuncs funcs;
/* [0]=backtrack, [1]=input, [2]=lookahead */
2405 const void *collect_data[3];
/* Apply-phase parameters for chain-contextual matching. */
2408 struct ChainContextApplyLookupContext
2410 ContextApplyFuncs funcs;
/* [0]=backtrack, [1]=input, [2]=lookahead */
2411 const void *match_data[3];
/* True iff every value of the backtrack, input and lookahead sequences
 * can be matched within `glyphs`.  inputCount includes the implicit
 * first glyph (matched via coverage/class elsewhere), hence the -1. */
2414 static inline bool chain_context_intersects (const hb_set_t *glyphs,
2415 unsigned int backtrackCount,
2416 const HBUINT16 backtrack[],
2417 unsigned int inputCount, /* Including the first glyph (not matched) */
2418 const HBUINT16 input[], /* Array of input values--start with second glyph */
2419 unsigned int lookaheadCount,
2420 const HBUINT16 lookahead[],
2421 ChainContextClosureLookupContext &lookup_context)
2423 return array_is_subset_of (glyphs,
2424 backtrackCount, backtrack,
2425 lookup_context.funcs.intersects, lookup_context.intersects_data[0])
2426 && array_is_subset_of (glyphs,
2427 inputCount ? inputCount - 1 : 0, input,
2428 lookup_context.funcs.intersects, lookup_context.intersects_data[1])
2429 && array_is_subset_of (glyphs,
2430 lookaheadCount, lookahead,
2431 lookup_context.funcs.intersects, lookup_context.intersects_data[2]);
/* Glyph closure: if the whole chain context can match within c->glyphs,
 * recurse into the rule's LookupRecords to accumulate reachable glyphs. */
2434 static inline void chain_context_closure_lookup (hb_closure_context_t *c,
2435 unsigned int backtrackCount,
2436 const HBUINT16 backtrack[],
2437 unsigned int inputCount, /* Including the first glyph (not matched) */
2438 const HBUINT16 input[], /* Array of input values--start with second glyph */
2439 unsigned int lookaheadCount,
2440 const HBUINT16 lookahead[],
2441 unsigned int lookupCount,
2442 const LookupRecord lookupRecord[],
2444 ChainContextClosureLookupContext &lookup_context)
2446 if (chain_context_intersects (c->glyphs,
2447 backtrackCount, backtrack,
2449 lookaheadCount, lookahead,
/* Recursion uses the *input* sequence data ([1]) to resolve which
 * glyphs are active at each matched position. */
2451 context_closure_recurse_lookups (c,
2453 lookupCount, lookupRecord,
2455 lookup_context.context_format,
2456 lookup_context.intersects_data[1],
2457 lookup_context.funcs.intersected_glyphs);
/* Collect every glyph each part of the chain can reference: backtrack
 * into c->before, input into c->input, lookahead into c->after; then
 * the recursed lookups contribute their own glyphs. */
2460 static inline void chain_context_collect_glyphs_lookup (hb_collect_glyphs_context_t *c,
2461 unsigned int backtrackCount,
2462 const HBUINT16 backtrack[],
2463 unsigned int inputCount, /* Including the first glyph (not matched) */
2464 const HBUINT16 input[], /* Array of input values--start with second glyph */
2465 unsigned int lookaheadCount,
2466 const HBUINT16 lookahead[],
2467 unsigned int lookupCount,
2468 const LookupRecord lookupRecord[],
2469 ChainContextCollectGlyphsLookupContext &lookup_context)
2471 collect_array (c, c->before,
2472 backtrackCount, backtrack,
2473 lookup_context.funcs.collect, lookup_context.collect_data[0])
2474 collect_array (c, c->input,
/* inputCount counts the implicit first glyph; only the rest is stored. */
2475 inputCount ? inputCount - 1 : 0, input,
2476 lookup_context.funcs.collect, lookup_context.collect_data[1]);
2477 collect_array (c, c->after,
2478 lookaheadCount, lookahead,
2479 lookup_context.funcs.collect, lookup_context.collect_data[2]);
2481 lookupCount, lookupRecord);
/* Fast applicability probe.  Backtrack/lookahead contents are not
 * matched (parameters marked HB_UNUSED); with zero_context set, any
 * surrounding context at all disqualifies the rule.  Only the input
 * sequence is actually matched. */
2484 static inline bool chain_context_would_apply_lookup (hb_would_apply_context_t *c,
2485 unsigned int backtrackCount,
2486 const HBUINT16 backtrack[] HB_UNUSED,
2487 unsigned int inputCount, /* Including the first glyph (not matched) */
2488 const HBUINT16 input[], /* Array of input values--start with second glyph */
2489 unsigned int lookaheadCount,
2490 const HBUINT16 lookahead[] HB_UNUSED,
2491 unsigned int lookupCount HB_UNUSED,
2492 const LookupRecord lookupRecord[] HB_UNUSED,
2493 ChainContextApplyLookupContext &lookup_context)
2495 return (c->zero_context ? !backtrackCount && !lookaheadCount : true)
2496 && would_match_input (c,
2498 lookup_context.funcs.match, lookup_context.match_data[1]);
/* Full application of one chain rule at the current buffer position:
 * match input then lookahead going forward, match backtrack against the
 * already-output glyphs, and on success run the nested LookupRecords.
 * On failure the examined span is flagged unsafe-to-concat so callers
 * cannot split the text there and get different results. */
2501 static inline bool chain_context_apply_lookup (hb_ot_apply_context_t *c,
2502 unsigned int backtrackCount,
2503 const HBUINT16 backtrack[],
2504 unsigned int inputCount, /* Including the first glyph (not matched) */
2505 const HBUINT16 input[], /* Array of input values--start with second glyph */
2506 unsigned int lookaheadCount,
2507 const HBUINT16 lookahead[],
2508 unsigned int lookupCount,
2509 const LookupRecord lookupRecord[],
2510 ChainContextApplyLookupContext &lookup_context)
2512 unsigned end_index = c->buffer->idx;
2513 unsigned match_end = 0;
2514 unsigned match_positions[HB_MAX_CONTEXT_LENGTH];
2515 if (!(match_input (c,
2517 lookup_context.funcs.match, lookup_context.match_data[1],
/* `(end_index = match_end)` records where input matching ended and
 * feeds that position into the lookahead match below. */
2518 &match_end, match_positions) && (end_index = match_end)
2519 && match_lookahead (c,
2520 lookaheadCount, lookahead,
2521 lookup_context.funcs.match, lookup_context.match_data[2],
2522 match_end, &end_index)))
2524 c->buffer->unsafe_to_concat (c->buffer->idx, end_index);
/* Backtrack is matched against glyphs already moved to the out-buffer. */
2528 unsigned start_index = c->buffer->out_len;
2529 if (!match_backtrack (c,
2530 backtrackCount, backtrack,
2531 lookup_context.funcs.match, lookup_context.match_data[0],
2534 c->buffer->unsafe_to_concat_from_outbuffer (start_index, end_index);
/* Complete match: pin the span as unsafe-to-break, then apply the
 * nested lookups at the recorded input positions. */
2538 c->buffer->unsafe_to_break_from_outbuffer (start_index, end_index);
2540 inputCount, match_positions,
2541 lookupCount, lookupRecord,
/* ChainRule::intersects — the input/lookahead arrays are packed right
 * after `backtrack` (located via StructAfter); test all three parts. */
2548 bool intersects (const hb_set_t *glyphs, ChainContextClosureLookupContext &lookup_context) const
2550 const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
2551 const Array16Of<HBUINT16> &lookahead = StructAfter<Array16Of<HBUINT16>> (input);
2552 return chain_context_intersects (glyphs,
2553 backtrack.len, backtrack.arrayZ,
/* lenP1 = stored length + 1: HeadlessArrayOf counts the implicit first glyph. */
2554 input.lenP1, input.arrayZ,
2555 lookahead.len, lookahead.arrayZ,
/* Glyph closure for this rule; bails out once the closure recursion
 * budget is exhausted. */
2559 void closure (hb_closure_context_t *c, unsigned value,
2560 ChainContextClosureLookupContext &lookup_context) const
2562 if (unlikely (c->lookup_limit_exceeded ())) return;
/* The four variable-length arrays are laid out back-to-back in memory. */
2564 const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
2565 const Array16Of<HBUINT16> &lookahead = StructAfter<Array16Of<HBUINT16>> (input);
2566 const Array16Of<LookupRecord> &lookup = StructAfter<Array16Of<LookupRecord>> (lookahead);
2567 chain_context_closure_lookup (c,
2568 backtrack.len, backtrack.arrayZ,
2569 input.lenP1, input.arrayZ,
2570 lookahead.len, lookahead.arrayZ,
2571 lookup.len, lookup.arrayZ,
/* Mark this rule's nested lookups reachable, but only when the rule can
 * still match within c->glyphs and the recursion budget allows it. */
2576 void closure_lookups (hb_closure_lookups_context_t *c,
2577 ChainContextClosureLookupContext &lookup_context) const
2579 if (unlikely (c->lookup_limit_exceeded ())) return;
2580 if (!intersects (c->glyphs, lookup_context)) return;
2582 const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
2583 const Array16Of<HBUINT16> &lookahead = StructAfter<Array16Of<HBUINT16>> (input);
2584 const Array16Of<LookupRecord> &lookup = StructAfter<Array16Of<LookupRecord>> (lookahead);
2585 recurse_lookups (c, lookup.len, lookup.arrayZ);
/* Gather all glyphs/values this rule references into the collect context. */
2588 void collect_glyphs (hb_collect_glyphs_context_t *c,
2589 ChainContextCollectGlyphsLookupContext &lookup_context) const
2591 const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
2592 const Array16Of<HBUINT16> &lookahead = StructAfter<Array16Of<HBUINT16>> (input);
2593 const Array16Of<LookupRecord> &lookup = StructAfter<Array16Of<LookupRecord>> (lookahead);
2594 chain_context_collect_glyphs_lookup (c,
2595 backtrack.len, backtrack.arrayZ,
2596 input.lenP1, input.arrayZ,
2597 lookahead.len, lookahead.arrayZ,
2598 lookup.len, lookup.arrayZ,
/* Delegate applicability check to the free helper (input-only match). */
2602 bool would_apply (hb_would_apply_context_t *c,
2603 ChainContextApplyLookupContext &lookup_context) const
2605 const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
2606 const Array16Of<HBUINT16> &lookahead = StructAfter<Array16Of<HBUINT16>> (input);
2607 const Array16Of<LookupRecord> &lookup = StructAfter<Array16Of<LookupRecord>> (lookahead);
2608 return chain_context_would_apply_lookup (c,
2609 backtrack.len, backtrack.arrayZ,
2610 input.lenP1, input.arrayZ,
2611 lookahead.len, lookahead.arrayZ, lookup.len,
2612 lookup.arrayZ, lookup_context);
/* Apply this rule at the current buffer position via the free helper. */
2615 bool apply (hb_ot_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
2618 const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
2619 const Array16Of<HBUINT16> &lookahead = StructAfter<Array16Of<HBUINT16>> (input);
2620 const Array16Of<LookupRecord> &lookup = StructAfter<Array16Of<LookupRecord>> (lookahead);
2621 return_trace (chain_context_apply_lookup (c,
2622 backtrack.len, backtrack.arrayZ,
2623 input.lenP1, input.arrayZ,
2624 lookahead.len, lookahead.arrayZ, lookup.len,
2625 lookup.arrayZ, lookup_context));
/* Copy each (already remapped) value of the iterator out as a
 * big-endian HBUINT16. */
2628 template<typename Iterator,
2629 hb_requires (hb_is_iterator (Iterator))>
2630 void serialize_array (hb_serialize_context_t *c,
2635 for (const auto g : it)
2636 c->copy ((HBUINT16) g);
/* Serialize this rule, remapping backtrack/input/lookahead values
 * (glyph ids or class ids) through their maps and lookup indices
 * through lookup_map.  When input/lookahead maps are not supplied,
 * the backtrack map carries over to the later sequences. */
2639 bool serialize (hb_serialize_context_t *c,
2640 const hb_map_t *lookup_map,
2641 const hb_map_t *backtrack_map,
2642 const hb_map_t *input_map = nullptr,
2643 const hb_map_t *lookahead_map = nullptr) const
2645 TRACE_SERIALIZE (this);
2646 auto *out = c->start_embed (this);
2647 if (unlikely (!out)) return_trace (false);
2649 const hb_map_t *mapping = backtrack_map;
2650 serialize_array (c, backtrack.len, + backtrack.iter ()
2651 | hb_map (mapping));
2653 const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
2654 if (input_map) mapping = input_map;
2655 serialize_array (c, input.lenP1, + input.iter ()
2656 | hb_map (mapping));
2658 const Array16Of<HBUINT16> &lookahead = StructAfter<Array16Of<HBUINT16>> (input);
2659 if (lookahead_map) mapping = lookahead_map;
2660 serialize_array (c, lookahead.len, + lookahead.iter ()
2661 | hb_map (mapping));
2663 const Array16Of<LookupRecord> &lookupRecord = StructAfter<Array16Of<LookupRecord>> (lookahead);
/* Reserve the count slot first; the real count (post-remap) is written
 * back below via check_assign. */
2665 HBUINT16* lookupCount = c->embed (&(lookupRecord.len));
2666 if (!lookupCount) return_trace (false);
2668 unsigned count = serialize_lookuprecord_array (c, lookupRecord.as_array (), lookup_map);
2669 return_trace (c->check_assign (*lookupCount, count, HB_SERIALIZE_ERROR_INT_OVERFLOW));
/* Subset this rule.  Glyph-based path (no class maps): keep the rule
 * only when every referenced glyph survives, remapping through the
 * plan's glyph_map.  Class-based path: same check against the provided
 * class maps.  NOTE(review): the branch selecting between the two paths
 * (presumably on backtrack_map being null) is elided in this extraction
 * — confirm against the full source. */
2672 bool subset (hb_subset_context_t *c,
2673 const hb_map_t *lookup_map,
2674 const hb_map_t *backtrack_map = nullptr,
2675 const hb_map_t *input_map = nullptr,
2676 const hb_map_t *lookahead_map = nullptr) const
2678 TRACE_SUBSET (this);
2680 const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
2681 const Array16Of<HBUINT16> &lookahead = StructAfter<Array16Of<HBUINT16>> (input);
2685 const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
2686 if (!hb_all (backtrack, glyphset) ||
2687 !hb_all (input, glyphset) ||
2688 !hb_all (lookahead, glyphset))
2689 return_trace (false);
2691 serialize (c->serializer, lookup_map, c->plan->glyph_map);
2695 if (!hb_all (backtrack, backtrack_map) ||
2696 !hb_all (input, input_map) ||
2697 !hb_all (lookahead, lookahead_map))
2698 return_trace (false);
2700 serialize (c->serializer, lookup_map, backtrack_map, input_map, lookahead_map);
2703 return_trace (true);
/* Sanitize the four packed arrays in order; each StructAfter is only
 * safe to compute once the preceding array's length has been verified. */
2706 bool sanitize (hb_sanitize_context_t *c) const
2708 TRACE_SANITIZE (this);
2709 if (!backtrack.sanitize (c)) return_trace (false);
2710 const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
2711 if (!input.sanitize (c)) return_trace (false);
2712 const Array16Of<HBUINT16> &lookahead = StructAfter<Array16Of<HBUINT16>> (input);
2713 if (!lookahead.sanitize (c)) return_trace (false);
2714 const Array16Of<LookupRecord> &lookup = StructAfter<Array16Of<LookupRecord>> (lookahead);
2715 return_trace (lookup.sanitize (c));
/* Only `backtrack` is a declared member; the X-suffixed arrays are
 * variable-length and located at runtime with StructAfter. */
2720 backtrack; /* Array of backtracking values
2721 * (to be matched before the input
2723 HeadlessArrayOf<HBUINT16>
2724 inputX; /* Array of input values (start with
2727 lookaheadX; /* Array of lookahead values's (to be
2728 * matched after the input sequence) */
2729 Array16Of<LookupRecord>
2730 lookupX; /* Array of LookupRecords--in
2733 DEFINE_SIZE_MIN (8);
/* ChainRuleSet: true if any rule in the set intersects `glyphs`. */
2738 bool intersects (const hb_set_t *glyphs, ChainContextClosureLookupContext &lookup_context) const
2742 | hb_map (hb_add (this))
2743 | hb_map ([&] (const ChainRule &_) { return _.intersects (glyphs, lookup_context); })
/* Run glyph closure over every rule; respects the recursion budget. */
2747 void closure (hb_closure_context_t *c, unsigned value, ChainContextClosureLookupContext &lookup_context) const
2749 if (unlikely (c->lookup_limit_exceeded ())) return;
2753 | hb_map (hb_add (this))
2754 | hb_apply ([&] (const ChainRule &_) { _.closure (c, value, lookup_context); })
/* Mark nested lookups of every rule reachable; budget-limited. */
2758 void closure_lookups (hb_closure_lookups_context_t *c,
2759 ChainContextClosureLookupContext &lookup_context) const
2761 if (unlikely (c->lookup_limit_exceeded ())) return;
2764 | hb_map (hb_add (this))
2765 | hb_apply ([&] (const ChainRule &_) { _.closure_lookups (c, lookup_context); })
/* Collect referenced glyphs from every rule in the set. */
2769 void collect_glyphs (hb_collect_glyphs_context_t *c, ChainContextCollectGlyphsLookupContext &lookup_context) const
2773 | hb_map (hb_add (this))
2774 | hb_apply ([&] (const ChainRule &_) { _.collect_glyphs (c, lookup_context); })
/* True if any rule in the set would apply. */
2778 bool would_apply (hb_would_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
2782 | hb_map (hb_add (this))
2783 | hb_map ([&] (const ChainRule &_) { return _.would_apply (c, lookup_context); })
/* Try rules in preference order; the first one to apply wins. */
2788 bool apply (hb_ot_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
2793 | hb_map (hb_add (this))
2794 | hb_map ([&] (const ChainRule &_) { return _.apply (c, lookup_context); })
/* Subset: append each rule that survives subsetting; a rule that fails
 * is undone by reverting the serializer to its per-rule snapshot, and
 * if no rule survives the whole set is reverted. */
2800 bool subset (hb_subset_context_t *c,
2801 const hb_map_t *lookup_map,
2802 const hb_map_t *backtrack_klass_map = nullptr,
2803 const hb_map_t *input_klass_map = nullptr,
2804 const hb_map_t *lookahead_klass_map = nullptr) const
2806 TRACE_SUBSET (this);
/* Snapshot for whole-set rollback on total failure. */
2808 auto snap = c->serializer->snapshot ();
2809 auto *out = c->serializer->start_embed (*this);
2810 if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
2812 for (const Offset16To<ChainRule>& _ : rule)
/* Per-rule snapshot so a failed rule can be rolled back individually. */
2815 auto o_snap = c->serializer->snapshot ();
2816 auto *o = out->rule.serialize_append (c->serializer);
2817 if (unlikely (!o)) continue;
2819 if (!o->serialize_subset (c, _, this,
2821 backtrack_klass_map,
2823 lookahead_klass_map))
2826 c->serializer->revert (o_snap);
2830 bool ret = bool (out->rule);
2831 if (!ret) c->serializer->revert (snap);
/* Sanitize the rule offset array (each ChainRule is checked via
 * the offset's sanitize). */
2836 bool sanitize (hb_sanitize_context_t *c) const
2838 TRACE_SANITIZE (this);
2839 return_trace (rule.sanitize (c, this));
/* Rules are tried in array order, i.e. author preference order. */
2843 Array16OfOffset16To<ChainRule>
2844 rule; /* Array of ChainRule tables
2845 * ordered by preference */
2847 DEFINE_SIZE_ARRAY (2, rule);
/* ChainContextFormat1: simple (glyph-id based) chain context.  One
 * ChainRuleSet per covered glyph; sequence values are raw glyph ids. */
2850 struct ChainContextFormat1
/* True if any rule set for a glyph in `glyphs` can match. */
2852 bool intersects (const hb_set_t *glyphs) const
2854 struct ChainContextClosureLookupContext lookup_context = {
2855 {intersects_glyph, intersected_glyph},
2856 ContextFormat::SimpleContext,
/* Simple context needs no auxiliary per-sequence data. */
2857 {nullptr, nullptr, nullptr}
2861 + hb_zip (this+coverage, ruleSet)
2862 | hb_filter (*glyphs, hb_first)
2863 | hb_map (hb_second)
2864 | hb_map (hb_add (this))
2865 | hb_map ([&] (const ChainRuleSet &_) { return _.intersects (glyphs, lookup_context); })
2870 bool may_have_non_1to1 () const
/* Glyph closure over rule sets whose covered glyph is currently active. */
2873 void closure (hb_closure_context_t *c) const
2875 hb_set_t* cur_active_glyphs = &c->push_cur_active_glyphs ();
2876 get_coverage ().intersected_coverage_glyphs (&c->previous_parent_active_glyphs (),
2879 struct ChainContextClosureLookupContext lookup_context = {
2880 {intersects_glyph, intersected_glyph},
2881 ContextFormat::SimpleContext,
2882 {nullptr, nullptr, nullptr}
2885 + hb_zip (this+coverage, hb_range ((unsigned) ruleSet.len))
2886 | hb_filter ([&] (hb_codepoint_t _) {
2887 return c->previous_parent_active_glyphs ().has (_);
2889 | hb_map ([&](const hb_pair_t<hb_codepoint_t, unsigned> _) { return hb_pair_t<unsigned, const ChainRuleSet&> (_.first, this+ruleSet[_.second]); })
2890 | hb_apply ([&] (const hb_pair_t<unsigned, const ChainRuleSet&>& _) { _.second.closure (c, _.first, lookup_context); })
2893 c->pop_cur_done_glyphs ();
/* Mark nested lookups reachable for rule sets of surviving glyphs. */
2896 void closure_lookups (hb_closure_lookups_context_t *c) const
2898 struct ChainContextClosureLookupContext lookup_context = {
2899 {intersects_glyph, intersected_glyph},
2900 ContextFormat::SimpleContext,
2901 {nullptr, nullptr, nullptr}
2904 + hb_zip (this+coverage, ruleSet)
2905 | hb_filter (*c->glyphs, hb_first)
2906 | hb_map (hb_second)
2907 | hb_map (hb_add (this))
2908 | hb_apply ([&] (const ChainRuleSet &_) { _.closure_lookups (c, lookup_context); })
2912 void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {}
/* Collect coverage glyphs, then each rule set's referenced glyphs. */
2914 void collect_glyphs (hb_collect_glyphs_context_t *c) const
2916 (this+coverage).collect_coverage (c->input);
2918 struct ChainContextCollectGlyphsLookupContext lookup_context = {
2920 {nullptr, nullptr, nullptr}
2924 | hb_map (hb_add (this))
2925 | hb_apply ([&] (const ChainRuleSet &_) { _.collect_glyphs (c, lookup_context); })
/* Applicability probe keyed on the first glyph's rule set. */
2929 bool would_apply (hb_would_apply_context_t *c) const
2931 const ChainRuleSet &rule_set = this+ruleSet[(this+coverage).get_coverage (c->glyphs[0])];
2932 struct ChainContextApplyLookupContext lookup_context = {
2934 {nullptr, nullptr, nullptr}
2936 return rule_set.would_apply (c, lookup_context);
2939 const Coverage &get_coverage () const { return this+coverage; }
/* Apply: current glyph selects its rule set via coverage index. */
2941 bool apply (hb_ot_apply_context_t *c) const
2944 unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
2945 if (likely (index == NOT_COVERED)) return_trace (false);
2947 const ChainRuleSet &rule_set = this+ruleSet[index];
2948 struct ChainContextApplyLookupContext lookup_context = {
2950 {nullptr, nullptr, nullptr}
2952 return_trace (rule_set.apply (c, lookup_context));
/* Subsetter: keep (glyph, ruleSet) pairs whose glyph survives and whose
 * rule set subsets non-empty; rebuild coverage from the remapped glyphs. */
2955 bool subset (hb_subset_context_t *c) const
2957 TRACE_SUBSET (this);
2958 const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
2959 const hb_map_t &glyph_map = *c->plan->glyph_map;
2961 auto *out = c->serializer->start_embed (*this);
2962 if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
2963 out->format = format;
2965 const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? c->plan->gsub_lookups : c->plan->gpos_lookups;
2966 hb_sorted_vector_t<hb_codepoint_t> new_coverage;
2967 + hb_zip (this+coverage, ruleSet)
2968 | hb_filter (glyphset, hb_first)
2969 | hb_filter (subset_offset_array (c, out->ruleSet, this, lookup_map), hb_second)
2971 | hb_map (glyph_map)
2972 | hb_sink (new_coverage)
2975 out->coverage.serialize_serialize (c->serializer, new_coverage.iter ());
2976 return_trace (bool (new_coverage));
2979 bool sanitize (hb_sanitize_context_t *c) const
2981 TRACE_SANITIZE (this);
2982 return_trace (coverage.sanitize (c, this) && ruleSet.sanitize (c, this));
2986 HBUINT16 format; /* Format identifier--format = 1 */
2987 Offset16To<Coverage>
2988 coverage; /* Offset to Coverage table--from
2989 * beginning of table */
2990 Array16OfOffset16To<ChainRuleSet>
2991 ruleSet; /* Array of ChainRuleSet tables
2992 * ordered by Coverage Index */
2994 DEFINE_SIZE_ARRAY (6, ruleSet);
/* ChainContextFormat2: class-based chain context.  Sequence values are
 * glyph classes; three separate ClassDefs cover backtrack, input and
 * lookahead, and ruleSet is indexed by input class. */
2997 struct ChainContextFormat2
/* True if some rule set whose input class both intersects `glyphs` and
 * is represented among covered glyphs can match. */
2999 bool intersects (const hb_set_t *glyphs) const
3001 if (!(this+coverage).intersects (glyphs))
3004 const ClassDef &backtrack_class_def = this+backtrackClassDef;
3005 const ClassDef &input_class_def = this+inputClassDef;
3006 const ClassDef &lookahead_class_def = this+lookaheadClassDef;
3008 struct ChainContextClosureLookupContext lookup_context = {
3009 {intersects_class, intersected_class_glyphs},
3010 ContextFormat::ClassBasedContext,
/* Per-sequence data: the matching ClassDef for each of the three parts. */
3011 {&backtrack_class_def,
3013 &lookahead_class_def}
/* Classes with no surviving covered glyph can never start a match. */
3016 hb_set_t retained_coverage_glyphs;
3017 (this+coverage).intersected_coverage_glyphs (glyphs, &retained_coverage_glyphs);
3019 hb_set_t coverage_glyph_classes;
3020 input_class_def.intersected_classes (&retained_coverage_glyphs, &coverage_glyph_classes);
3024 | hb_map (hb_add (this))
3026 | hb_map ([&] (const hb_pair_t<unsigned, const ChainRuleSet &> p)
3027 { return input_class_def.intersects_class (glyphs, p.first) &&
3028 coverage_glyph_classes.has (p.first) &&
3029 p.second.intersects (glyphs, lookup_context); })
3034 bool may_have_non_1to1 () const
/* Glyph closure over rule sets whose input class is active. */
3037 void closure (hb_closure_context_t *c) const
3039 if (!(this+coverage).intersects (c->glyphs))
3042 hb_set_t* cur_active_glyphs = &c->push_cur_active_glyphs ();
3043 get_coverage ().intersected_coverage_glyphs (&c->previous_parent_active_glyphs (),
3047 const ClassDef &backtrack_class_def = this+backtrackClassDef;
3048 const ClassDef &input_class_def = this+inputClassDef;
3049 const ClassDef &lookahead_class_def = this+lookaheadClassDef;
3051 struct ChainContextClosureLookupContext lookup_context = {
3052 {intersects_class, intersected_class_glyphs},
3053 ContextFormat::ClassBasedContext,
3054 {&backtrack_class_def,
3056 &lookahead_class_def}
/* ruleSet index == input class number; `value` passed to closure is the class. */
3059 + hb_enumerate (ruleSet)
3060 | hb_filter ([&] (unsigned _)
3061 { return input_class_def.intersects_class (&c->parent_active_glyphs (), _); },
3063 | hb_apply ([&] (const hb_pair_t<unsigned, const Offset16To<ChainRuleSet>&> _)
3065 const ChainRuleSet& chainrule_set = this+_.second;
3066 chainrule_set.closure (c, _.first, lookup_context);
3070 c->pop_cur_done_glyphs ();
3073 void closure_lookups (hb_closure_lookups_context_t *c) const
3075 if (!(this+coverage).intersects (c->glyphs))
3078 const ClassDef &backtrack_class_def = this+backtrackClassDef;
3079 const ClassDef &input_class_def = this+inputClassDef;
3080 const ClassDef &lookahead_class_def = this+lookaheadClassDef;
3082 struct ChainContextClosureLookupContext lookup_context = {
3083 {intersects_class, intersected_class_glyphs},
3084 ContextFormat::ClassBasedContext,
3085 {&backtrack_class_def,
3087 &lookahead_class_def}
3091 | hb_map (hb_add (this))
3093 | hb_filter([&] (unsigned klass)
3094 { return input_class_def.intersects_class (c->glyphs, klass); }, hb_first)
3095 | hb_map (hb_second)
3096 | hb_apply ([&] (const ChainRuleSet &_)
3097 { _.closure_lookups (c, lookup_context); })
3101 void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {}
/* Collect coverage glyphs; rule-set values are classes, expanded
 * through the three ClassDefs passed in collect_data. */
3103 void collect_glyphs (hb_collect_glyphs_context_t *c) const
3105 (this+coverage).collect_coverage (c->input);
3107 const ClassDef &backtrack_class_def = this+backtrackClassDef;
3108 const ClassDef &input_class_def = this+inputClassDef;
3109 const ClassDef &lookahead_class_def = this+lookaheadClassDef;
3111 struct ChainContextCollectGlyphsLookupContext lookup_context = {
3113 {&backtrack_class_def,
3115 &lookahead_class_def}
3119 | hb_map (hb_add (this))
3120 | hb_apply ([&] (const ChainRuleSet &_) { _.collect_glyphs (c, lookup_context); })
/* Applicability probe: rule set chosen by the first glyph's input class. */
3124 bool would_apply (hb_would_apply_context_t *c) const
3126 const ClassDef &backtrack_class_def = this+backtrackClassDef;
3127 const ClassDef &input_class_def = this+inputClassDef;
3128 const ClassDef &lookahead_class_def = this+lookaheadClassDef;
3130 unsigned int index = input_class_def.get_class (c->glyphs[0]);
3131 const ChainRuleSet &rule_set = this+ruleSet[index];
3132 struct ChainContextApplyLookupContext lookup_context = {
3134 {&backtrack_class_def,
3136 &lookahead_class_def}
3138 return rule_set.would_apply (c, lookup_context);
3141 const Coverage &get_coverage () const { return this+coverage; }
/* Apply: coverage gates entry; then the current glyph's *class* (not
 * its coverage index) selects the rule set. */
3143 bool apply (hb_ot_apply_context_t *c) const
3146 unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
3147 if (likely (index == NOT_COVERED)) return_trace (false);
3149 const ClassDef &backtrack_class_def = this+backtrackClassDef;
3150 const ClassDef &input_class_def = this+inputClassDef;
3151 const ClassDef &lookahead_class_def = this+lookaheadClassDef;
3153 index = input_class_def.get_class (c->buffer->cur().codepoint);
3154 const ChainRuleSet &rule_set = this+ruleSet[index];
3155 struct ChainContextApplyLookupContext lookup_context = {
3157 {&backtrack_class_def,
3159 &lookahead_class_def}
3161 return_trace (rule_set.apply (c, lookup_context));
/* Subsetter: subset coverage and the three ClassDefs (collecting old→new
 * class maps), then keep only rule sets whose class survives, finally
 * pruning empty trailing rule sets. */
3164 bool subset (hb_subset_context_t *c) const
3166 TRACE_SUBSET (this);
3167 auto *out = c->serializer->start_embed (*this);
3168 if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
3169 out->format = format;
3170 out->coverage.serialize_subset (c, coverage, this);
3172 hb_map_t backtrack_klass_map;
3173 hb_map_t input_klass_map;
3174 hb_map_t lookahead_klass_map;
3176 out->backtrackClassDef.serialize_subset (c, backtrackClassDef, this, &backtrack_klass_map);
3177 // TODO: subset inputClassDef based on glyphs survived in Coverage subsetting
3178 out->inputClassDef.serialize_subset (c, inputClassDef, this, &input_klass_map);
3179 out->lookaheadClassDef.serialize_subset (c, lookaheadClassDef, this, &lookahead_klass_map);
3181 if (unlikely (!c->serializer->propagate_error (backtrack_klass_map,
3183 lookahead_klass_map)))
3184 return_trace (false);
/* Classes with no surviving covered glyph produce dead rule sets. */
3186 const hb_set_t* glyphset = c->plan->glyphset_gsub ();
3187 hb_set_t retained_coverage_glyphs;
3188 (this+coverage).intersected_coverage_glyphs (glyphset, &retained_coverage_glyphs);
3190 hb_set_t coverage_glyph_classes;
3191 (this+inputClassDef).intersected_classes (&retained_coverage_glyphs, &coverage_glyph_classes);
/* Track the last rule set that serialized non-empty so trailing empties
 * can be reverted below. */
3193 int non_zero_index = -1, index = 0;
3195 const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? c->plan->gsub_lookups : c->plan->gpos_lookups;
3196 auto last_non_zero = c->serializer->snapshot ();
3197 for (const auto& _ : + hb_enumerate (ruleSet)
3198 | hb_filter (input_klass_map, hb_first))
3200 auto *o = out->ruleSet.serialize_append (c->serializer);
3206 if (coverage_glyph_classes.has (_.first) &&
3207 o->serialize_subset (c, _.second, this,
3209 &backtrack_klass_map,
3211 &lookahead_klass_map))
3213 last_non_zero = c->serializer->snapshot ();
3214 non_zero_index = index;
3220 if (!ret || non_zero_index == -1) return_trace (false);
3222 // prune empty trailing ruleSets
3223 if (index > non_zero_index) {
3224 c->serializer->revert (last_non_zero);
3225 out->ruleSet.len = non_zero_index + 1;
3228 return_trace (bool (out->ruleSet));
3231 bool sanitize (hb_sanitize_context_t *c) const
3233 TRACE_SANITIZE (this);
3234 return_trace (coverage.sanitize (c, this) &&
3235 backtrackClassDef.sanitize (c, this) &&
3236 inputClassDef.sanitize (c, this) &&
3237 lookaheadClassDef.sanitize (c, this) &&
3238 ruleSet.sanitize (c, this));
3242 HBUINT16 format; /* Format identifier--format = 2 */
3243 Offset16To<Coverage>
3244 coverage; /* Offset to Coverage table--from
3245 * beginning of table */
3246 Offset16To<ClassDef>
3247 backtrackClassDef; /* Offset to glyph ClassDef table
3248 * containing backtrack sequence
3249 * data--from beginning of table */
3250 Offset16To<ClassDef>
3251 inputClassDef; /* Offset to glyph ClassDef
3252 * table containing input sequence
3253 * data--from beginning of table */
3254 Offset16To<ClassDef>
3255 lookaheadClassDef; /* Offset to glyph ClassDef table
3256 * containing lookahead sequence
3257 * data--from beginning of table */
3258 Array16OfOffset16To<ChainRuleSet>
3259 ruleSet; /* Array of ChainRuleSet tables
3260 * ordered by class */
3262 DEFINE_SIZE_ARRAY (12, ruleSet);
3265 struct ChainContextFormat3
3267 bool intersects (const hb_set_t *glyphs) const
3269 const Array16OfOffset16To<Coverage> &input = StructAfter<Array16OfOffset16To<Coverage>> (backtrack);
3271 if (!(this+input[0]).intersects (glyphs))
3274 const Array16OfOffset16To<Coverage> &lookahead = StructAfter<Array16OfOffset16To<Coverage>> (input);
3275 struct ChainContextClosureLookupContext lookup_context = {
3276 {intersects_coverage, intersected_coverage_glyphs},
3277 ContextFormat::CoverageBasedContext,
3280 return chain_context_intersects (glyphs,
3281 backtrack.len, (const HBUINT16 *) backtrack.arrayZ,
3282 input.len, (const HBUINT16 *) input.arrayZ + 1,
3283 lookahead.len, (const HBUINT16 *) lookahead.arrayZ,
3287 bool may_have_non_1to1 () const
// Glyph-closure pass: if the pivot coverage intersects the current glyph set,
// push a narrowed active-glyph set and recurse into the chained lookups.
3290 void closure (hb_closure_context_t *c) const
3292 const Array16OfOffset16To<Coverage> &input = StructAfter<Array16OfOffset16To<Coverage>> (backtrack);
// Bail out early when the pivot coverage cannot match anything in c->glyphs.
3294 if (!(this+input[0]).intersects (c->glyphs))
// Narrow the active set to glyphs actually covered at the pivot position.
3297 hb_set_t* cur_active_glyphs = &c->push_cur_active_glyphs ();
3298 get_coverage ().intersected_coverage_glyphs (&c->previous_parent_active_glyphs (),
3302 const Array16OfOffset16To<Coverage> &lookahead = StructAfter<Array16OfOffset16To<Coverage>> (input);
3303 const Array16Of<LookupRecord> &lookup = StructAfter<Array16Of<LookupRecord>> (lookahead);
3304 struct ChainContextClosureLookupContext lookup_context = {
3305 {intersects_coverage, intersected_coverage_glyphs},
3306 ContextFormat::CoverageBasedContext,
// As in intersects(): arrayZ + 1 skips the pivot input coverage.
3309 chain_context_closure_lookup (c,
3310 backtrack.len, (const HBUINT16 *) backtrack.arrayZ,
3311 input.len, (const HBUINT16 *) input.arrayZ + 1,
3312 lookahead.len, (const HBUINT16 *) lookahead.arrayZ,
3313 lookup.len, lookup.arrayZ,
// Balance the push_cur_active_glyphs() above.
3316 c->pop_cur_done_glyphs ();
// Collect lookup indices reachable from this subtable, but only when the
// subtable can actually apply against c->glyphs (otherwise it is inactive).
3319 void closure_lookups (hb_closure_lookups_context_t *c) const
3321 if (!intersects (c->glyphs))
// Walk past backtrack/input/lookahead to reach the LookupRecord array.
3324 const Array16OfOffset16To<Coverage> &input = StructAfter<Array16OfOffset16To<Coverage>> (backtrack);
3325 const Array16OfOffset16To<Coverage> &lookahead = StructAfter<Array16OfOffset16To<Coverage>> (input);
3326 const Array16Of<LookupRecord> &lookup = StructAfter<Array16Of<LookupRecord>> (lookahead);
3327 recurse_lookups (c, lookup.len, lookup.arrayZ);
// No-op: chain-context subtables carry no variation indices themselves.
3330 void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {}
// Accumulate the glyphs this subtable can touch into the collect context:
// pivot coverage into c->input, then the rest of the chain via the helper.
3332 void collect_glyphs (hb_collect_glyphs_context_t *c) const
3334 const Array16OfOffset16To<Coverage> &input = StructAfter<Array16OfOffset16To<Coverage>> (backtrack);
3336 (this+input[0]).collect_coverage (c->input);
3338 const Array16OfOffset16To<Coverage> &lookahead = StructAfter<Array16OfOffset16To<Coverage>> (input);
3339 const Array16Of<LookupRecord> &lookup = StructAfter<Array16Of<LookupRecord>> (lookahead);
// Funcs struct elided in this view; presumably collect_coverage per position.
3340 struct ChainContextCollectGlyphsLookupContext lookup_context = {
3344 chain_context_collect_glyphs_lookup (c,
3345 backtrack.len, (const HBUINT16 *) backtrack.arrayZ,
3346 input.len, (const HBUINT16 *) input.arrayZ + 1,
3347 lookahead.len, (const HBUINT16 *) lookahead.arrayZ,
3348 lookup.len, lookup.arrayZ,
// Would this subtable apply to the (hypothetical) glyph sequence in `c`?
// Used by hb_ot_layout_lookup_would_substitute-style queries; no buffer edits.
3352 bool would_apply (hb_would_apply_context_t *c) const
3354 const Array16OfOffset16To<Coverage> &input = StructAfter<Array16OfOffset16To<Coverage>> (backtrack);
3355 const Array16OfOffset16To<Coverage> &lookahead = StructAfter<Array16OfOffset16To<Coverage>> (input);
3356 const Array16Of<LookupRecord> &lookup = StructAfter<Array16Of<LookupRecord>> (lookahead);
// Initializer elided in this view; presumably {{match_coverage}, ...}.
3357 struct ChainContextApplyLookupContext lookup_context = {
3361 return chain_context_would_apply_lookup (c,
3362 backtrack.len, (const HBUINT16 *) backtrack.arrayZ,
3363 input.len, (const HBUINT16 *) input.arrayZ + 1,
3364 lookahead.len, (const HBUINT16 *) lookahead.arrayZ,
3365 lookup.len, lookup.arrayZ, lookup_context);
// The "primary" coverage of a format-3 chain context is the first input
// coverage (the pivot position the apply loop tests first).
3368 const Coverage &get_coverage () const
3370 const Array16OfOffset16To<Coverage> &input = StructAfter<Array16OfOffset16To<Coverage>> (backtrack);
3371 return this+input[0];
// Runtime application: match the chain around the current buffer glyph and,
// on success, execute the nested LookupRecords.  Returns true if applied.
3374 bool apply (hb_ot_apply_context_t *c) const
3377 const Array16OfOffset16To<Coverage> &input = StructAfter<Array16OfOffset16To<Coverage>> (backtrack);
// Pivot test against the current glyph in the buffer.
3379 unsigned int index = (this+input[0]).get_coverage (c->buffer->cur().codepoint);
3380 if (likely (index == NOT_COVERED)) return_trace (false);
3382 const Array16OfOffset16To<Coverage> &lookahead = StructAfter<Array16OfOffset16To<Coverage>> (input);
3383 const Array16Of<LookupRecord> &lookup = StructAfter<Array16Of<LookupRecord>> (lookahead);
// Initializer elided in this view; presumably {{match_coverage}, ...}.
3384 struct ChainContextApplyLookupContext lookup_context = {
3388 return_trace (chain_context_apply_lookup (c,
3389 backtrack.len, (const HBUINT16 *) backtrack.arrayZ,
3390 input.len, (const HBUINT16 *) input.arrayZ + 1,
3391 lookahead.len, (const HBUINT16 *) lookahead.arrayZ,
3392 lookup.len, lookup.arrayZ, lookup_context));
// Subset helper: serialize one coverage-offset array (backtrack, input or
// lookahead).  Writes the 16-bit count slot, then subsets each Coverage
// offset relative to `base`.  Returns false on serializer error.
3395 template<typename Iterator,
3396 hb_requires (hb_is_iterator (Iterator))>
3397 bool serialize_coverage_offsets (hb_subset_context_t *c, Iterator it, const void* base) const
3399 TRACE_SERIALIZE (this);
3400 auto *out = c->serializer->start_embed<Array16OfOffset16To<Coverage>> ();
// Reserve the HBUINT16 length field of the array up front.
3402 if (unlikely (!c->serializer->allocate_size<HBUINT16> (HBUINT16::static_size)))
3403 return_trace (false);
3405 for (auto& offset : it) {
3406 auto *o = out->serialize_append (c->serializer);
3407 if (unlikely (!o) || !o->serialize_subset (c, offset, base))
3408 return_trace (false);
3411 return_trace (true);
// Subset this format-3 subtable: copy the format field, re-serialize the
// three coverage arrays in layout order, then prune and re-index the
// LookupRecords against the plan's retained-lookup map.
3414 bool subset (hb_subset_context_t *c) const
3416 TRACE_SUBSET (this);
3418 auto *out = c->serializer->start_embed (this);
3419 if (unlikely (!out)) return_trace (false);
3420 if (unlikely (!c->serializer->embed (this->format))) return_trace (false);
// The three arrays must be written in the same order they are read
// (backtrack, input, lookahead), since each is located via StructAfter<>.
3422 if (!serialize_coverage_offsets (c, backtrack.iter (), this))
3423 return_trace (false);
3425 const Array16OfOffset16To<Coverage> &input = StructAfter<Array16OfOffset16To<Coverage>> (backtrack);
3426 if (!serialize_coverage_offsets (c, input.iter (), this))
3427 return_trace (false);
3429 const Array16OfOffset16To<Coverage> &lookahead = StructAfter<Array16OfOffset16To<Coverage>> (input);
3430 if (!serialize_coverage_offsets (c, lookahead.iter (), this))
3431 return_trace (false);
3433 const Array16Of<LookupRecord> &lookupRecord = StructAfter<Array16Of<LookupRecord>> (lookahead);
// GSUB and GPOS keep separate lookup-index remapping tables in the plan.
3434 const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? c->plan->gsub_lookups : c->plan->gpos_lookups;
3436 HBUINT16 *lookupCount = c->serializer->copy<HBUINT16> (lookupRecord.len);
3437 if (!lookupCount) return_trace (false);
// Write the surviving records, then back-patch the count we copied above.
3439 unsigned count = serialize_lookuprecord_array (c->serializer, lookupRecord.as_array (), lookup_map);
3440 return_trace (c->serializer->check_assign (*lookupCount, count, HB_SERIALIZE_ERROR_INT_OVERFLOW));
// Bounds-check the four trailing arrays in order; each StructAfter<> is only
// safe once the preceding array has been sanitized.
3443 bool sanitize (hb_sanitize_context_t *c) const
3445 TRACE_SANITIZE (this);
3446 if (!backtrack.sanitize (c, this)) return_trace (false);
3447 const Array16OfOffset16To<Coverage> &input = StructAfter<Array16OfOffset16To<Coverage>> (backtrack);
3448 if (!input.sanitize (c, this)) return_trace (false);
// An empty input sequence is rejected, matching the (non-chained) Context.
3449 if (!input.len) return_trace (false); /* To be consistent with Context. */
3450 const Array16OfOffset16To<Coverage> &lookahead = StructAfter<Array16OfOffset16To<Coverage>> (input);
3451 if (!lookahead.sanitize (c, this)) return_trace (false);
3452 const Array16Of<LookupRecord> &lookup = StructAfter<Array16Of<LookupRecord>> (lookahead);
3453 return_trace (lookup.sanitize (c));
// Wire layout of ChainSequenceContextFormat3.  The X-suffixed members are
// placeholders: real positions are computed at runtime with StructAfter<>,
// since each array's size is variable.
3457 HBUINT16 format; /* Format identifier--format = 3 */
3458 Array16OfOffset16To<Coverage>
3459 backtrack; /* Array of coverage tables
3460 * in backtracking sequence, in glyph
3462 Array16OfOffset16To<Coverage>
3463 inputX ; /* Array of coverage
3464 * tables in input sequence, in glyph
3466 Array16OfOffset16To<Coverage>
3467 lookaheadX; /* Array of coverage tables
3468 * in lookahead sequence, in glyph
3470 Array16Of<LookupRecord>
3471 lookupX; /* Array of LookupRecords--in
3474 DEFINE_SIZE_MIN (10);
// ChainContext wrapper: dispatches on the on-disk format tag to one of the
// three subtable formats.  (The enclosing struct/union declaration and the
// switch header are elided from this view.)
3479 template <typename context_t, typename ...Ts>
3480 typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
3482 TRACE_DISPATCH (this, u.format);
// may_dispatch() also sanitizes u.format before it is trusted below.
3483 if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
3485 case 1: return_trace (c->dispatch (u.format1, std::forward<Ts> (ds)...));
3486 case 2: return_trace (c->dispatch (u.format2, std::forward<Ts> (ds)...));
3487 case 3: return_trace (c->dispatch (u.format3, std::forward<Ts> (ds)...));
3488 default:return_trace (c->default_return_value ());
// Union members: `format` aliases the leading HBUINT16 of every variant.
3494 HBUINT16 format; /* Format identifier */
3495 ChainContextFormat1 format1;
3496 ChainContextFormat2 format2;
3497 ChainContextFormat3 format3;
3502 template <typename T>
// Extension lookup, format 1: a fixed 8-byte record holding the real lookup
// type plus a 32-bit offset to the actual subtable, letting subtables live
// beyond 16-bit offset range.  T is the owning lookup family (GSUB/GPOS).
3503 struct ExtensionFormat1
3505 unsigned int get_type () const { return extensionLookupType; }
// Resolve the 32-bit offset to the wrapped subtable, cast to X.
3507 template <typename X>
3508 const X& get_subtable () const
3509 { return this + reinterpret_cast<const Offset32To<typename T::SubTable> &> (extensionOffset); }
// Forward dispatch to the wrapped subtable, passing its real lookup type.
3511 template <typename context_t, typename ...Ts>
3512 typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
3514 TRACE_DISPATCH (this, format);
3515 if (unlikely (!c->may_dispatch (this, this))) return_trace (c->no_dispatch_return_value ());
3516 return_trace (get_subtable<typename T::SubTable> ().dispatch (c, get_type (), std::forward<Ts> (ds)...));
3519 void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
3522 /* This is called from may_dispatch() above with hb_sanitize_context_t. */
3523 bool sanitize (hb_sanitize_context_t *c) const
3525 TRACE_SANITIZE (this);
// Reject nested Extension-in-Extension, which the spec forbids.
3526 return_trace (c->check_struct (this) &&
3527 extensionLookupType != T::SubTable::Extension);
// Subset: copy format/type verbatim, then subset the wrapped subtable
// through the 32-bit offset.
3530 bool subset (hb_subset_context_t *c) const
3532 TRACE_SUBSET (this);
3534 auto *out = c->serializer->start_embed (this);
3535 if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);
3537 out->format = format;
3538 out->extensionLookupType = extensionLookupType;
3540 const auto& src_offset =
3541 reinterpret_cast<const Offset32To<typename T::SubTable> &> (extensionOffset);
3543 reinterpret_cast<Offset32To<typename T::SubTable> &> (out->extensionOffset);
3545 return_trace (dest_offset.serialize_subset (c, src_offset, this, get_type ()));
3549 HBUINT16 format; /* Format identifier. Set to 1. */
3550 HBUINT16 extensionLookupType; /* Lookup type of subtable referenced
3551 * by ExtensionOffset (i.e. the
3552 * extension subtable). */
3553 Offset32 extensionOffset; /* Offset to the extension subtable,
3554 * of lookup type subtable. */
3556 DEFINE_SIZE_STATIC (8);
3559 template <typename T>
// Extension<T> wrapper (struct declaration elided in this view): multiplexes
// over extension subtable formats — only format 1 is defined by OpenType.
3562 unsigned int get_type () const
3565 case 1: return u.format1.get_type ();
3569 template <typename X>
3570 const X& get_subtable () const
3573 case 1: return u.format1.template get_subtable<typename T::SubTable> ();
3574 default:return Null (typename T::SubTable);
3578 // Specialization of dispatch for subset. dispatch() normally just
3579 // dispatches to the sub table this points too, but for subset
3580 // we need to run subset on this subtable too.
3581 template <typename ...Ts>
3582 typename hb_subset_context_t::return_t dispatch (hb_subset_context_t *c, Ts&&... ds) const
3585 case 1: return u.format1.subset (c);
3586 default: return c->default_return_value ();
// Generic dispatch for every other context type.
3590 template <typename context_t, typename ...Ts>
3591 typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
3593 TRACE_DISPATCH (this, u.format);
3594 if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
3596 case 1: return_trace (u.format1.dispatch (c, std::forward<Ts> (ds)...));
3597 default:return_trace (c->default_return_value ());
3603 HBUINT16 format; /* Format identifier */
3604 ExtensionFormat1<T> format1;
// Per-lookup acceleration cache: a glyph-set digest for fast rejection plus a
// flat array of the lookup's subtables for the apply loop.
3613 struct hb_ot_layout_lookup_accelerator_t
3615 template <typename TLookup>
3616 void init (const TLookup &lookup)
// Build the digest from the lookup's full coverage...
3619 lookup.collect_coverage (&digest);
// ...and collect its subtables via the get-subtables dispatch context.
3622 OT::hb_get_subtables_context_t c_get_subtables (subtables);
3623 lookup.dispatch (&c_get_subtables);
3625 void fini () { subtables.fini (); }
// Cheap probabilistic filter: false means g definitely isn't covered.
3627 bool may_have (hb_codepoint_t g) const
3628 { return digest.may_have (g); }
// Try subtables in order; first one that applies wins.
3630 bool apply (hb_ot_apply_context_t *c) const
3632 for (unsigned int i = 0; i < subtables.length; i++)
3633 if (subtables[i].apply (c))
3639 hb_set_digest_t digest;
3640 hb_get_subtables_context_t::array_t subtables;
// --- GSUBGPOS common-header accessors (enclosing struct elided in this view).
// All of these simply dereference the ScriptList/FeatureList/LookupList
// offsets in the table header.
3645 bool has_data () const { return version.to_int (); }
3646 unsigned int get_script_count () const
3647 { return (this+scriptList).len; }
3648 const Tag& get_script_tag (unsigned int i) const
3649 { return (this+scriptList).get_tag (i); }
3650 unsigned int get_script_tags (unsigned int start_offset,
3651 unsigned int *script_count /* IN/OUT */,
3652 hb_tag_t *script_tags /* OUT */) const
3653 { return (this+scriptList).get_tags (start_offset, script_count, script_tags); }
3654 const Script& get_script (unsigned int i) const
3655 { return (this+scriptList)[i]; }
3656 bool find_script_index (hb_tag_t tag, unsigned int *index) const
3657 { return (this+scriptList).find_index (tag, index); }
3659 unsigned int get_feature_count () const
3660 { return (this+featureList).len; }
// NOT_FOUND_INDEX maps to HB_TAG_NONE rather than indexing out of range.
3661 hb_tag_t get_feature_tag (unsigned int i) const
3662 { return i == Index::NOT_FOUND_INDEX ? HB_TAG_NONE : (this+featureList).get_tag (i); }
3663 unsigned int get_feature_tags (unsigned int start_offset,
3664 unsigned int *feature_count /* IN/OUT */,
3665 hb_tag_t *feature_tags /* OUT */) const
3666 { return (this+featureList).get_tags (start_offset, feature_count, feature_tags); }
3667 const Feature& get_feature (unsigned int i) const
3668 { return (this+featureList)[i]; }
3669 bool find_feature_index (hb_tag_t tag, unsigned int *index) const
3670 { return (this+featureList).find_index (tag, index); }
3672 unsigned int get_lookup_count () const
3673 { return (this+lookupList).len; }
3674 const Lookup& get_lookup (unsigned int i) const
3675 { return (this+lookupList)[i]; }
// Find the FeatureVariations record matching the given variation coords.
// Only table version >= 1.1 carries a featureVars offset; older versions
// resolve to the Null table (and thus report not-found).
3677 bool find_variations_index (const int *coords, unsigned int num_coords,
3678 unsigned int *index) const
3681 *index = FeatureVariations::NOT_FOUND_INDEX;
3684 return (version.to_int () >= 0x00010001u ? this+featureVars : Null (FeatureVariations))
3685 .find_index (coords, num_coords, index);
// Return the variation-substituted Feature if one exists for this
// (feature_index, variations_index) pair; otherwise the default Feature.
3687 const Feature& get_feature_variation (unsigned int feature_index,
3688 unsigned int variations_index) const
3691 if (FeatureVariations::NOT_FOUND_INDEX != variations_index &&
3692 version.to_int () >= 0x00010001u)
3694 const Feature *feature = (this+featureVars).find_substitute (variations_index,
3700 return get_feature (feature_index);
// Collect lookups referenced by variation-alternate features (v1.1+ only).
3703 void feature_variation_collect_lookups (const hb_set_t *feature_indexes,
3704 hb_set_t *lookup_indexes /* OUT */) const
3707 if (version.to_int () >= 0x00010001u)
3708 (this+featureVars).collect_lookups (feature_indexes, lookup_indexes);
// Transitively close the given lookup-index set over `glyphs`: add every
// lookup reachable by recursion, then drop lookups that cannot apply.
3712 template <typename TLookup>
3713 void closure_lookups (hb_face_t *face,
3714 const hb_set_t *glyphs,
3715 hb_set_t *lookup_indexes /* IN/OUT */) const
3717 hb_set_t visited_lookups, inactive_lookups;
3718 OT::hb_closure_lookups_context_t c (face, glyphs, &visited_lookups, &inactive_lookups);
3720 for (unsigned lookup_index : + hb_iter (lookup_indexes))
3721 reinterpret_cast<const TLookup &> (get_lookup (lookup_index)).closure_lookups (&c, lookup_index);
// visited minus inactive = the final retained lookup set.
3723 hb_set_union (lookup_indexes, &visited_lookups);
3724 hb_set_subtract (lookup_indexes, &inactive_lookups);
// Subsetting: walk every script and prune its LangSys records, recording the
// surviving feature indices and the per-script langsys map for the plan.
3727 void prune_langsys (const hb_map_t *duplicate_feature_map,
3728 hb_hashmap_t<unsigned, hb_set_t *> *script_langsys_map,
3729 hb_set_t *new_feature_indexes /* OUT */) const
3731 hb_prune_langsys_context_t c (this, script_langsys_map, duplicate_feature_map, new_feature_indexes);
3733 unsigned count = get_script_count ();
3734 for (unsigned script_index = 0; script_index < count; script_index++)
3736 const Script& s = get_script (script_index);
3737 s.prune_langsys (&c, script_index);
// Subset the whole GSUB/GPOS table: embed the header, then subset the
// lookup, feature and script lists in turn, and finally featureVars.  If the
// 1.1 featureVars subset produces nothing, the table is downgraded to 1.0.
3741 template <typename TLookup>
3742 bool subset (hb_subset_layout_context_t *c) const
3744 TRACE_SUBSET (this);
3745 auto *out = c->subset_context->serializer->embed (*this);
3746 if (unlikely (!out)) return_trace (false);
// reinterpret_casts below view the generic offsets as their concrete
// per-table list types so serialize_subset picks the right code path.
3748 typedef LookupOffsetList<TLookup> TLookupList;
3749 reinterpret_cast<Offset16To<TLookupList> &> (out->lookupList)
3750 .serialize_subset (c->subset_context,
3751 reinterpret_cast<const Offset16To<TLookupList> &> (lookupList),
3755 reinterpret_cast<Offset16To<RecordListOfFeature> &> (out->featureList)
3756 .serialize_subset (c->subset_context,
3757 reinterpret_cast<const Offset16To<RecordListOfFeature> &> (featureList),
3761 out->scriptList.serialize_subset (c->subset_context,
3767 if (version.to_int () >= 0x00010001u)
3769 bool ret = out->featureVars.serialize_subset (c->subset_context, featureVars, this, c);
// Downgrade to version 1.0 when no feature variations survive.
3772 out->version.major = 1;
3773 out->version.minor = 0;
3778 return_trace (true);
// After lookup pruning, two features with the same tag can end up with
// identical (retained) lookup lists; map every feature index to a canonical
// representative so duplicates collapse in the subset output.
3781 void find_duplicate_features (const hb_map_t *lookup_indices,
3782 const hb_set_t *feature_indices,
3783 hb_map_t *duplicate_feature_map /* OUT */) const
3785 if (feature_indices->is_empty ()) return;
// tag -> set of already-seen feature indices carrying that tag.
3786 hb_hashmap_t<hb_tag_t, hb_set_t *> unique_features;
3787 //find out duplicate features after subset
3788 for (unsigned i : feature_indices->iter ())
3790 hb_tag_t t = get_feature_tag (i);
3791 if (t == HB_MAP_VALUE_INVALID) continue;
3792 if (!unique_features.has (t))
3794 hb_set_t* indices = hb_set_create ();
// On allocation/insertion failure: clean up everything and bail.
3795 if (unlikely (indices == hb_set_get_empty () ||
3796 !unique_features.set (t, indices)))
3798 hb_set_destroy (indices);
3799 for (auto _ : unique_features.iter ())
3800 hb_set_destroy (_.second);
// First feature seen with this tag is its own canonical representative.
3803 if (unique_features.get (t))
3804 unique_features.get (t)->add (i);
3805 duplicate_feature_map->set (i, i);
// Compare feature i against every prior same-tag feature by walking their
// retained-lookup lists in lockstep.
3811 hb_set_t* same_tag_features = unique_features.get (t);
3812 for (unsigned other_f_index : same_tag_features->iter ())
3814 const Feature& f = get_feature (i);
3815 const Feature& other_f = get_feature (other_f_index);
// Both iterators are filtered to lookups kept by the subset plan.
3818 + hb_iter (f.lookupIndex)
3819 | hb_filter (lookup_indices)
3823 + hb_iter (other_f.lookupIndex)
3824 | hb_filter (lookup_indices)
3827 bool is_equal = true;
3828 for (; f_iter && other_f_iter; f_iter++, other_f_iter++)
3830 unsigned a = *f_iter;
3831 unsigned b = *other_f_iter;
3832 if (a != b) { is_equal = false; break; }
// Leftover elements on either side also mean the lists differ.
3835 if (is_equal == false || f_iter || other_f_iter) continue;
3838 duplicate_feature_map->set (i, other_f_index);
// No duplicate found: record i as a new representative for its tag.
3844 same_tag_features->add (i);
3845 duplicate_feature_map->set (i, i);
// Release the per-tag index sets owned by this function.
3849 for (auto _ : unique_features.iter ())
3850 hb_set_destroy (_.second);
// Drop feature indices whose lookups were all pruned, except features that
// must survive regardless: 'pref' (shaper selection), 'size' (carries
// FeatureParams), and features with live variation alternates.
3853 void prune_features (const hb_map_t *lookup_indices, /* IN */
3854 hb_set_t *feature_indices /* IN/OUT */) const
3857 // This is the set of feature indices which have alternate versions defined
3858 // if the FeatureVariation's table and the alternate version(s) intersect the
3859 // set of lookup indices.
3860 hb_set_t alternate_feature_indices;
3861 if (version.to_int () >= 0x00010001u)
3862 (this+featureVars).closure_features (lookup_indices, &alternate_feature_indices);
// Propagate allocation failure to the output set instead of mispruning.
3863 if (unlikely (alternate_feature_indices.in_error()))
3865 feature_indices->err ();
3870 for (unsigned i : feature_indices->iter())
3872 const Feature& f = get_feature (i);
3873 hb_tag_t tag = get_feature_tag (i);
3874 if (tag == HB_TAG ('p', 'r', 'e', 'f'))
3875 // Note: Never ever drop feature 'pref', even if it's empty.
3876 // HarfBuzz chooses shaper for Khmer based on presence of this
3877 // feature. See thread at:
3878 // http://lists.freedesktop.org/archives/harfbuzz/2012-November/002660.html
// 'size' features carry data in FeatureParams even with no lookups.
3882 if (!f.featureParams.is_null () &&
3883 tag == HB_TAG ('s', 'i', 'z', 'e'))
// Otherwise: drop the feature when none of its lookups survived and it has
// no variation alternate that could still activate it.
3886 if (!f.intersects_lookup_indexes (lookup_indices)
3888 && !alternate_feature_indices.has (i)
3891 feature_indices->del (i);
// Header size: the 1.1 header is larger by the 32-bit featureVars offset.
3895 unsigned int get_size () const
3898 (version.to_int () >= 0x00010001u ? featureVars.static_size : 0);
// Validate the table header: version must be major 1; the three list offsets
// must sanitize; featureVars is only checked for version >= 1.1.
3901 template <typename TLookup>
3902 bool sanitize (hb_sanitize_context_t *c) const
3904 TRACE_SANITIZE (this);
3905 typedef List16OfOffset16To<TLookup> TLookupList;
3906 if (unlikely (!(version.sanitize (c) &&
3907 likely (version.major == 1) &&
3908 scriptList.sanitize (c, this) &&
3909 featureList.sanitize (c, this) &&
// Cast so the per-table lookup type's sanitize is used for the list.
3910 reinterpret_cast<const Offset16To<TLookupList> &> (lookupList).sanitize (c, this))))
3911 return_trace (false);
3914 if (unlikely (!(version.to_int () < 0x00010001u || featureVars.sanitize (c, this))))
3915 return_trace (false);
3918 return_trace (true);
// Face-lifetime accelerator: holds a sanitized blob of the table plus one
// lookup accelerator per lookup.  T is the concrete table type (GSUB/GPOS).
3921 template <typename T>
3922 struct accelerator_t
3924 accelerator_t (hb_face_t *face)
3926 this->table = hb_sanitize_context_t ().reference_table<T> (face);
// Blocklisted (known-broken) fonts get an empty table instead.
3927 if (unlikely (this->table->is_blocklisted (this->table.get_blob (), face)))
3929 hb_blob_destroy (this->table.get_blob ());
3930 this->table = hb_blob_get_empty ();
3933 this->lookup_count = table->get_lookup_count ();
// One zero-initialized accelerator slot per lookup.
3935 this->accels = (hb_ot_layout_lookup_accelerator_t *) hb_calloc (this->lookup_count, sizeof (hb_ot_layout_lookup_accelerator_t));
// On allocation failure, degrade to an empty table rather than crash.
3936 if (unlikely (!this->accels))
3938 this->lookup_count = 0;
3939 this->table.destroy ();
3940 this->table = hb_blob_get_empty ();
3943 for (unsigned int i = 0; i < this->lookup_count; i++)
3944 this->accels[i].init (table->get_lookup (i));
// Destructor body (header elided in view): fini accels, free, drop blob.
3948 for (unsigned int i = 0; i < this->lookup_count; i++)
3949 this->accels[i].fini ();
3950 hb_free (this->accels);
3951 this->table.destroy ();
3954 hb_blob_ptr_t<T> table;
3955 unsigned int lookup_count;
3956 hb_ot_layout_lookup_accelerator_t *accels;
// On-disk GSUB/GPOS header fields.  featureVars exists only when
// version >= 1.1 — hence DEFINE_SIZE_MIN, not DEFINE_SIZE_STATIC.
3960 FixedVersion<>version; /* Version of the GSUB/GPOS table--initially set
3962 Offset16To<ScriptList>
3963 scriptList; /* ScriptList table */
3964 Offset16To<FeatureList>
3965 featureList; /* FeatureList table */
3966 Offset16To<LookupList>
3967 lookupList; /* LookupList table */
3968 Offset32To<FeatureVariations>
3969 featureVars; /* Offset to Feature Variations
3970 table--from beginning of table
3971 * (may be NULL). Introduced
3972 * in version 0x00010001. */
3974 DEFINE_SIZE_MIN (10);
3978 } /* namespace OT */
3981 #endif /* HB_OT_LAYOUT_GSUBGPOS_HH */