2 * Copyright © 2007,2008,2009,2010 Red Hat, Inc.
3 * Copyright © 2010,2012,2013 Google, Inc.
5 * This is part of HarfBuzz, a text shaping library.
7 * Permission is hereby granted, without written agreement and without
8 * license or royalty fees, to use, copy, modify, and distribute this
9 * software and its documentation for any purpose, provided that the
10 * above copyright notice and the following two paragraphs appear in
11 * all copies of this software.
13 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
14 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
15 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
16 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
19 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
20 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
21 * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
22 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
23 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
25 * Red Hat Author(s): Behdad Esfahbod
26 * Google Author(s): Behdad Esfahbod
29 #ifndef HB_OT_LAYOUT_GSUB_TABLE_HH
30 #define HB_OT_LAYOUT_GSUB_TABLE_HH
32 #include "hb-ot-layout-gsubgpos.hh"
/* A (from-glyph, to-glyph) mapping pair, used throughout GSUB serialization/subsetting. */
37 typedef hb_pair_t<hb_codepoint_t, hb_codepoint_t> hb_codepoint_pair_t;
/* Forward declaration: serializes a SingleSubst (choosing format 1 vs 2) from an
 * iterator of glyph pairs.  NOTE(review): rest of the signature is outside this extract. */
39 template<typename Iterator>
40 static void SingleSubst_serialize (hb_serialize_context_t *c,
/* Single substitution, format 1: substitute = (input glyph + deltaGlyphID) mod 0x10000
 * for every glyph listed in `coverage`. */
44 struct SingleSubstFormat1
/* True iff any glyph in `glyphs` is covered by this subtable. */
46 bool intersects (const hb_set_t *glyphs) const
47 { return (this+coverage).intersects (glyphs); }
/* Closure: emit the shifted output glyph for every covered glyph already reachable. */
49 void closure (hb_closure_context_t *c) const
51 unsigned d = deltaGlyphID;
52 + hb_iter (this+coverage)
53 | hb_filter (*c->glyphs)
54 | hb_map ([d] (hb_codepoint_t g) { return (g + d) & 0xFFFFu; })
/* A SingleSubst references no nested lookups. */
59 void closure_lookups (hb_closure_lookups_context_t *c) const {}
/* Collect input glyphs (the coverage) and output glyphs (coverage + delta, mod 2^16). */
61 void collect_glyphs (hb_collect_glyphs_context_t *c) const
63 if (unlikely (!(this+coverage).collect_coverage (c->input))) return;
64 unsigned d = deltaGlyphID;
65 + hb_iter (this+coverage)
66 | hb_map ([d] (hb_codepoint_t g) { return (g + d) & 0xFFFFu; })
71 const Coverage &get_coverage () const { return this+coverage; }
/* Applies only to a single covered glyph. */
73 bool would_apply (hb_would_apply_context_t *c) const
74 { return c->len == 1 && (this+coverage).get_coverage (c->glyphs[0]) != NOT_COVERED; }
/* Replace the current buffer glyph with (glyph + delta) & 0xFFFF if covered. */
76 bool apply (hb_ot_apply_context_t *c) const
79 hb_codepoint_t glyph_id = c->buffer->cur().codepoint;
80 unsigned int index = (this+coverage).get_coverage (glyph_id);
81 if (likely (index == NOT_COVERED)) return_trace (false);
83 /* According to the Adobe Annotated OpenType Suite, result is always
84 * limited to 16bit. */
85 glyph_id = (glyph_id + deltaGlyphID) & 0xFFFFu;
86 c->replace_glyph (glyph_id);
/* Serialize from a sorted iterator of covered glyphs plus one shared delta. */
91 template<typename Iterator,
92 hb_requires (hb_is_sorted_source_of (Iterator, hb_codepoint_t))>
93 bool serialize (hb_serialize_context_t *c,
97 TRACE_SERIALIZE (this);
98 if (unlikely (!c->extend_min (*this))) return_trace (false);
99 if (unlikely (!coverage.serialize (c, this).serialize (c, glyphs))) return_trace (false);
100 c->check_assign (deltaGlyphID, delta);
/* Subset: keep (old, old+delta) pairs where both glyphs survive, remap through
 * glyph_map, then re-serialize via SingleSubst_serialize (which re-picks the format,
 * since remapped pairs need not share a single delta). */
104 bool subset (hb_subset_context_t *c) const
107 const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
108 const hb_map_t &glyph_map = *c->plan->glyph_map;
110 hb_codepoint_t delta = deltaGlyphID;
113 + hb_iter (this+coverage)
114 | hb_filter (glyphset)
115 | hb_map_retains_sorting ([&] (hb_codepoint_t g) {
116 return hb_codepoint_pair_t (g,
117 (g + delta) & 0xFFFF); })
118 | hb_filter (glyphset, hb_second)
119 | hb_map_retains_sorting ([&] (hb_codepoint_pair_t p) -> hb_codepoint_pair_t
120 { return hb_pair (glyph_map[p.first], glyph_map[p.second]); })
123 bool ret = bool (it);
124 SingleSubst_serialize (c->serializer, it);
/* Bounds/offset safety check of the binary data. */
128 bool sanitize (hb_sanitize_context_t *c) const
130 TRACE_SANITIZE (this);
131 return_trace (coverage.sanitize (c, this) && deltaGlyphID.sanitize (c));
135 HBUINT16 format; /* Format identifier--format = 1 */
137 coverage; /* Offset to Coverage table--from
138 * beginning of Substitution table */
139 HBUINT16 deltaGlyphID; /* Add to original GlyphID to get
140 * substitute GlyphID, modulo 0x10000 */
142 DEFINE_SIZE_STATIC (6);
/* Single substitution, format 2: explicit substitute-glyph array, parallel to
 * the coverage order (substitute[i] replaces the i-th covered glyph). */
145 struct SingleSubstFormat2
147 bool intersects (const hb_set_t *glyphs) const
148 { return (this+coverage).intersects (glyphs); }
/* Closure: emit the substitute of every covered glyph already reachable. */
150 void closure (hb_closure_context_t *c) const
152 + hb_zip (this+coverage, substitute)
153 | hb_filter (*c->glyphs, hb_first)
155 | hb_sink (c->output)
159 void closure_lookups (hb_closure_lookups_context_t *c) const {}
161 void collect_glyphs (hb_collect_glyphs_context_t *c) const
163 if (unlikely (!(this+coverage).collect_coverage (c->input))) return;
164 + hb_zip (this+coverage, substitute)
166 | hb_sink (c->output)
170 const Coverage &get_coverage () const { return this+coverage; }
172 bool would_apply (hb_would_apply_context_t *c) const
173 { return c->len == 1 && (this+coverage).get_coverage (c->glyphs[0]) != NOT_COVERED; }
/* Replace the current glyph with substitute[coverage index]; index is
 * bounds-checked against the substitute array (malformed fonts). */
175 bool apply (hb_ot_apply_context_t *c) const
178 unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
179 if (likely (index == NOT_COVERED)) return_trace (false);
181 if (unlikely (index >= substitute.len)) return_trace (false);
183 c->replace_glyph (substitute[index]);
/* Serialize from a sorted iterator of (from, to) glyph pairs. */
188 template<typename Iterator,
189 hb_requires (hb_is_sorted_source_of (Iterator,
190 hb_codepoint_pair_t))>
191 bool serialize (hb_serialize_context_t *c,
194 TRACE_SERIALIZE (this);
201 | hb_map_retains_sorting (hb_first)
203 if (unlikely (!c->extend_min (*this))) return_trace (false);
204 if (unlikely (!substitute.serialize (c, substitutes))) return_trace (false);
205 if (unlikely (!coverage.serialize (c, this).serialize (c, glyphs))) return_trace (false);
/* Subset: keep pairs where both sides survive, remap, re-serialize (format re-chosen). */
209 bool subset (hb_subset_context_t *c) const
212 const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
213 const hb_map_t &glyph_map = *c->plan->glyph_map;
216 + hb_zip (this+coverage, substitute)
217 | hb_filter (glyphset, hb_first)
218 | hb_filter (glyphset, hb_second)
219 | hb_map_retains_sorting ([&] (hb_pair_t<hb_codepoint_t, const HBGlyphID &> p) -> hb_codepoint_pair_t
220 { return hb_pair (glyph_map[p.first], glyph_map[p.second]); })
223 bool ret = bool (it);
224 SingleSubst_serialize (c->serializer, it);
228 bool sanitize (hb_sanitize_context_t *c) const
230 TRACE_SANITIZE (this);
231 return_trace (coverage.sanitize (c, this) && substitute.sanitize (c));
235 HBUINT16 format; /* Format identifier--format = 2 */
237 coverage; /* Offset to Coverage table--from
238 * beginning of Substitution table */
240 substitute; /* Array of substitute
241 * GlyphIDs--ordered by Coverage Index */
243 DEFINE_SIZE_ARRAY (6, substitute);
/* NOTE(review): the enclosing `struct SingleSubst` header lies outside this extract. */
/* Serialize from sorted (from, to) pairs: stay with format 1 when every pair shares
 * one 16-bit delta, otherwise switch to format 2. */
249 template<typename Iterator,
250 hb_requires (hb_is_sorted_source_of (Iterator,
251 const hb_codepoint_pair_t))>
252 bool serialize (hb_serialize_context_t *c,
255 TRACE_SERIALIZE (this);
256 if (unlikely (!c->extend_min (u.format))) return_trace (false);
/* Delta is computed mod 2^16; hb_all checks it is uniform across the pairs. */
262 auto get_delta = [=] (hb_codepoint_pair_t _)
263 { return (unsigned) (_.second - _.first) & 0xFFFF; };
264 delta = get_delta (*glyphs);
265 if (!hb_all (++(+glyphs), delta, get_delta)) format = 2;
269 case 1: return_trace (u.format1.serialize (c,
271 | hb_map_retains_sorting (hb_first),
273 case 2: return_trace (u.format2.serialize (c, glyphs));
274 default:return_trace (false);
/* Dispatch to the concrete format struct based on the on-disk format tag. */
278 template <typename context_t, typename ...Ts>
279 typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
281 TRACE_DISPATCH (this, u.format);
282 if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
284 case 1: return_trace (c->dispatch (u.format1, hb_forward<Ts> (ds)...));
285 case 2: return_trace (c->dispatch (u.format2, hb_forward<Ts> (ds)...));
286 default:return_trace (c->default_return_value ());
/* Union of supported formats, discriminated by `format`. */
292 HBUINT16 format; /* Format identifier */
293 SingleSubstFormat1 format1;
294 SingleSubstFormat2 format2;
/* Free helper: embed a SingleSubst at the serializer's current position and
 * serialize it (format chosen inside) from an iterator of glyph pairs. */
298 template<typename Iterator>
300 SingleSubst_serialize (hb_serialize_context_t *c,
302 { c->start_embed<SingleSubst> ()->serialize (c, it); }
/* NOTE(review): enclosing struct header (the multiple-substitution Sequence,
 * judging by its use in MultipleSubstFormat1 below) is outside this extract. */
/* A sequence intersects only when ALL of its output glyphs are in the set. */
306 bool intersects (const hb_set_t *glyphs) const
307 { return hb_all (substitute, glyphs); }
309 void closure (hb_closure_context_t *c) const
310 { c->output->add_array (substitute.arrayZ, substitute.len); }
312 void collect_glyphs (hb_collect_glyphs_context_t *c) const
313 { c->output->add_array (substitute.arrayZ, substitute.len); }
/* Replace one glyph with the stored sequence (1:1, deletion, or 1:N). */
315 bool apply (hb_ot_apply_context_t *c) const
318 unsigned int count = substitute.len;
320 /* Special-case to make it in-place and not consider this
321 * as a "multiplied" substitution. */
322 if (unlikely (count == 1))
324 c->replace_glyph (substitute.arrayZ[0]);
327 /* Spec disallows this, but Uniscribe allows it.
328 * https://github.com/harfbuzz/harfbuzz/issues/253 */
329 else if (unlikely (count == 0))
331 c->buffer->delete_glyph ();
/* General case: emit each output glyph as a component of the original glyph. */
335 unsigned int klass = _hb_glyph_info_is_ligature (&c->buffer->cur()) ?
336 HB_OT_LAYOUT_GLYPH_PROPS_BASE_GLYPH : 0;
338 for (unsigned int i = 0; i < count; i++) {
339 _hb_glyph_info_set_lig_props_for_component (&c->buffer->cur(), i);
340 c->output_glyph_for_component (substitute.arrayZ[i], klass);
342 c->buffer->skip_glyph ();
347 template <typename Iterator,
348 hb_requires (hb_is_source_of (Iterator, hb_codepoint_t))>
349 bool serialize (hb_serialize_context_t *c,
352 TRACE_SERIALIZE (this);
353 return_trace (substitute.serialize (c, subst));
/* Subset: drop the whole sequence unless every output glyph survives. */
356 bool subset (hb_subset_context_t *c) const
359 const hb_set_t &glyphset = *c->plan->glyphset ();
360 const hb_map_t &glyph_map = *c->plan->glyph_map;
362 if (!intersects (&glyphset)) return_trace (false);
365 + hb_iter (substitute)
369 auto *out = c->serializer->start_embed (*this);
370 return_trace (out->serialize (c->serializer, it));
373 bool sanitize (hb_sanitize_context_t *c) const
375 TRACE_SANITIZE (this);
376 return_trace (substitute.sanitize (c));
381 substitute; /* String of GlyphIDs to substitute */
383 DEFINE_SIZE_ARRAY (2, substitute);
/* Multiple substitution, format 1: one-to-many; each covered glyph maps to a
 * Sequence table (offset array parallel to coverage order). */
386 struct MultipleSubstFormat1
388 bool intersects (const hb_set_t *glyphs) const
389 { return (this+coverage).intersects (glyphs); }
/* Closure: forward to the Sequence of each covered-and-reachable glyph. */
391 void closure (hb_closure_context_t *c) const
393 + hb_zip (this+coverage, sequence)
394 | hb_filter (*c->glyphs, hb_first)
396 | hb_map (hb_add (this))
397 | hb_apply ([c] (const Sequence &_) { _.closure (c); })
401 void closure_lookups (hb_closure_lookups_context_t *c) const {}
403 void collect_glyphs (hb_collect_glyphs_context_t *c) const
405 if (unlikely (!(this+coverage).collect_coverage (c->input))) return;
406 + hb_zip (this+coverage, sequence)
408 | hb_map (hb_add (this))
409 | hb_apply ([c] (const Sequence &_) { _.collect_glyphs (c); })
413 const Coverage &get_coverage () const { return this+coverage; }
415 bool would_apply (hb_would_apply_context_t *c) const
416 { return c->len == 1 && (this+coverage).get_coverage (c->glyphs[0]) != NOT_COVERED; }
/* Delegate application to the Sequence selected by the coverage index. */
418 bool apply (hb_ot_apply_context_t *c) const
422 unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
423 if (likely (index == NOT_COVERED)) return_trace (false);
425 return_trace ((this+sequence[index]).apply (c));
/* Serialize: substitute_len_list[i] glyphs are consumed from
 * substitute_glyphs_list for the i-th covered glyph. */
428 bool serialize (hb_serialize_context_t *c,
429 hb_sorted_array_t<const HBGlyphID> glyphs,
430 hb_array_t<const unsigned int> substitute_len_list,
431 hb_array_t<const HBGlyphID> substitute_glyphs_list)
433 TRACE_SERIALIZE (this);
434 if (unlikely (!c->extend_min (*this))) return_trace (false);
435 if (unlikely (!sequence.serialize (c, glyphs.length))) return_trace (false);
436 for (unsigned int i = 0; i < glyphs.length; i++)
438 unsigned int substitute_len = substitute_len_list[i];
439 if (unlikely (!sequence[i].serialize (c, this)
440 .serialize (c, substitute_glyphs_list.sub_array (0, substitute_len))))
441 return_trace (false);
442 substitute_glyphs_list += substitute_len;
444 return_trace (coverage.serialize (c, this).serialize (c, glyphs));
/* Subset: keep covered glyphs whose Sequence subsets successfully, remap, and
 * rebuild the coverage; returns false when nothing survives. */
447 bool subset (hb_subset_context_t *c) const
450 const hb_set_t &glyphset = *c->plan->glyphset ();
451 const hb_map_t &glyph_map = *c->plan->glyph_map;
453 auto *out = c->serializer->start_embed (*this);
454 if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
455 out->format = format;
457 hb_sorted_vector_t<hb_codepoint_t> new_coverage;
458 + hb_zip (this+coverage, sequence)
459 | hb_filter (glyphset, hb_first)
460 | hb_filter (subset_offset_array (c, out->sequence, this), hb_second)
463 | hb_sink (new_coverage)
465 out->coverage.serialize (c->serializer, out)
466 .serialize (c->serializer, new_coverage.iter ());
467 return_trace (bool (new_coverage));
470 bool sanitize (hb_sanitize_context_t *c) const
472 TRACE_SANITIZE (this);
473 return_trace (coverage.sanitize (c, this) && sequence.sanitize (c, this));
477 HBUINT16 format; /* Format identifier--format = 1 */
479 coverage; /* Offset to Coverage table--from
480 * beginning of Substitution table */
481 OffsetArrayOf<Sequence>
482 sequence; /* Array of Sequence tables
483 * ordered by Coverage Index */
485 DEFINE_SIZE_ARRAY (6, sequence);
/* NOTE(review): enclosing `struct MultipleSubst` header is outside this extract. */
/* Serialize: only format 1 exists for MultipleSubst. */
490 bool serialize (hb_serialize_context_t *c,
491 hb_sorted_array_t<const HBGlyphID> glyphs,
492 hb_array_t<const unsigned int> substitute_len_list,
493 hb_array_t<const HBGlyphID> substitute_glyphs_list)
495 TRACE_SERIALIZE (this);
496 if (unlikely (!c->extend_min (u.format))) return_trace (false);
497 unsigned int format = 1;
500 case 1: return_trace (u.format1.serialize (c, glyphs, substitute_len_list, substitute_glyphs_list));
501 default:return_trace (false);
/* Dispatch on the on-disk format tag. */
505 template <typename context_t, typename ...Ts>
506 typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
508 TRACE_DISPATCH (this, u.format);
509 if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
511 case 1: return_trace (c->dispatch (u.format1, hb_forward<Ts> (ds)...));
512 default:return_trace (c->default_return_value ());
518 HBUINT16 format; /* Format identifier */
519 MultipleSubstFormat1 format1;
/* NOTE(review): enclosing struct header (the AlternateSet used by
 * AlternateSubstFormat1 below) is outside this extract. */
/* Intersects if ANY alternate is in the set (unlike Sequence, which needs all). */
525 bool intersects (const hb_set_t *glyphs) const
526 { return hb_any (alternates, glyphs); }
528 void closure (hb_closure_context_t *c) const
529 { c->output->add_array (alternates.arrayZ, alternates.len); }
531 void collect_glyphs (hb_collect_glyphs_context_t *c) const
532 { c->output->add_array (alternates.arrayZ, alternates.len); }
/* Pick the alternate selected by the lookup-mask bits carried on the glyph
 * (1-based; 0 or out-of-range means "no substitution"). */
534 bool apply (hb_ot_apply_context_t *c) const
537 unsigned int count = alternates.len;
539 if (unlikely (!count)) return_trace (false);
541 hb_mask_t glyph_mask = c->buffer->cur().mask;
542 hb_mask_t lookup_mask = c->lookup_mask;
544 /* Note: This breaks badly if two features enabled this lookup together. */
545 unsigned int shift = hb_ctz (lookup_mask);
546 unsigned int alt_index = ((lookup_mask & glyph_mask) >> shift);
548 /* If alt_index is MAX_VALUE, randomize feature if it is the rand feature. */
549 if (alt_index == HB_OT_MAP_MAX_VALUE && c->random)
550 alt_index = c->random_number () % count + 1;
552 if (unlikely (alt_index > count || alt_index == 0)) return_trace (false);
554 c->replace_glyph (alternates[alt_index - 1]);
559 template <typename Iterator,
560 hb_requires (hb_is_source_of (Iterator, hb_codepoint_t))>
561 bool serialize (hb_serialize_context_t *c,
564 TRACE_SERIALIZE (this);
565 return_trace (alternates.serialize (c, alts));
/* Subset: keep only alternates that survive in the new glyph set. */
568 bool subset (hb_subset_context_t *c) const
571 const hb_set_t &glyphset = *c->plan->glyphset ();
572 const hb_map_t &glyph_map = *c->plan->glyph_map;
575 + hb_iter (alternates)
576 | hb_filter (glyphset)
580 auto *out = c->serializer->start_embed (*this);
581 return_trace (out->serialize (c->serializer, it) &&
585 bool sanitize (hb_sanitize_context_t *c) const
587 TRACE_SANITIZE (this);
588 return_trace (alternates.sanitize (c));
593 alternates; /* Array of alternate GlyphIDs--in
596 DEFINE_SIZE_ARRAY (2, alternates);
/* Alternate substitution, format 1: each covered glyph maps to an AlternateSet
 * (offset array parallel to coverage order); user/feature picks the alternate. */
599 struct AlternateSubstFormat1
601 bool intersects (const hb_set_t *glyphs) const
602 { return (this+coverage).intersects (glyphs); }
/* Closure: forward to the AlternateSet of each covered-and-reachable glyph. */
604 void closure (hb_closure_context_t *c) const
606 + hb_zip (this+coverage, alternateSet)
607 | hb_filter (c->glyphs, hb_first)
609 | hb_map (hb_add (this))
610 | hb_apply ([c] (const AlternateSet &_) { _.closure (c); })
614 void closure_lookups (hb_closure_lookups_context_t *c) const {}
616 void collect_glyphs (hb_collect_glyphs_context_t *c) const
618 if (unlikely (!(this+coverage).collect_coverage (c->input))) return;
619 + hb_zip (this+coverage, alternateSet)
621 | hb_map (hb_add (this))
622 | hb_apply ([c] (const AlternateSet &_) { _.collect_glyphs (c); })
626 const Coverage &get_coverage () const { return this+coverage; }
628 bool would_apply (hb_would_apply_context_t *c) const
629 { return c->len == 1 && (this+coverage).get_coverage (c->glyphs[0]) != NOT_COVERED; }
/* Delegate application to the AlternateSet selected by the coverage index. */
631 bool apply (hb_ot_apply_context_t *c) const
635 unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
636 if (likely (index == NOT_COVERED)) return_trace (false);
638 return_trace ((this+alternateSet[index]).apply (c));
/* Serialize: alternate_len_list[i] glyphs are consumed from
 * alternate_glyphs_list for the i-th covered glyph. */
641 bool serialize (hb_serialize_context_t *c,
642 hb_sorted_array_t<const HBGlyphID> glyphs,
643 hb_array_t<const unsigned int> alternate_len_list,
644 hb_array_t<const HBGlyphID> alternate_glyphs_list)
646 TRACE_SERIALIZE (this);
647 if (unlikely (!c->extend_min (*this))) return_trace (false);
648 if (unlikely (!alternateSet.serialize (c, glyphs.length))) return_trace (false);
649 for (unsigned int i = 0; i < glyphs.length; i++)
651 unsigned int alternate_len = alternate_len_list[i];
652 if (unlikely (!alternateSet[i].serialize (c, this)
653 .serialize (c, alternate_glyphs_list.sub_array (0, alternate_len))))
654 return_trace (false);
655 alternate_glyphs_list += alternate_len;
657 return_trace (coverage.serialize (c, this).serialize (c, glyphs));
/* Subset: keep covered glyphs whose AlternateSet subsets successfully; rebuild coverage. */
660 bool subset (hb_subset_context_t *c) const
663 const hb_set_t &glyphset = *c->plan->glyphset ();
664 const hb_map_t &glyph_map = *c->plan->glyph_map;
666 auto *out = c->serializer->start_embed (*this);
667 if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
668 out->format = format;
670 hb_sorted_vector_t<hb_codepoint_t> new_coverage;
671 + hb_zip (this+coverage, alternateSet)
672 | hb_filter (glyphset, hb_first)
673 | hb_filter (subset_offset_array (c, out->alternateSet, this), hb_second)
676 | hb_sink (new_coverage)
678 out->coverage.serialize (c->serializer, out)
679 .serialize (c->serializer, new_coverage.iter ());
680 return_trace (bool (new_coverage));
683 bool sanitize (hb_sanitize_context_t *c) const
685 TRACE_SANITIZE (this);
686 return_trace (coverage.sanitize (c, this) && alternateSet.sanitize (c, this));
690 HBUINT16 format; /* Format identifier--format = 1 */
692 coverage; /* Offset to Coverage table--from
693 * beginning of Substitution table */
694 OffsetArrayOf<AlternateSet>
695 alternateSet; /* Array of AlternateSet tables
696 * ordered by Coverage Index */
698 DEFINE_SIZE_ARRAY (6, alternateSet);
/* Top-level AlternateSubst: format-dispatching wrapper (only format 1 exists). */
701 struct AlternateSubst
703 bool serialize (hb_serialize_context_t *c,
704 hb_sorted_array_t<const HBGlyphID> glyphs,
705 hb_array_t<const unsigned int> alternate_len_list,
706 hb_array_t<const HBGlyphID> alternate_glyphs_list)
708 TRACE_SERIALIZE (this);
709 if (unlikely (!c->extend_min (u.format))) return_trace (false);
710 unsigned int format = 1;
713 case 1: return_trace (u.format1.serialize (c, glyphs, alternate_len_list, alternate_glyphs_list));
714 default:return_trace (false);
/* Dispatch on the on-disk format tag. */
718 template <typename context_t, typename ...Ts>
719 typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
721 TRACE_DISPATCH (this, u.format);
722 if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
724 case 1: return_trace (c->dispatch (u.format1, hb_forward<Ts> (ds)...));
725 default:return_trace (c->default_return_value ());
731 HBUINT16 format; /* Format identifier */
732 AlternateSubstFormat1 format1;
/* NOTE(review): enclosing struct header (the Ligature used by LigatureSet below)
 * is outside this extract. */
/* A ligature intersects only when ALL its (trailing) components are in the set. */
739 bool intersects (const hb_set_t *glyphs) const
740 { return hb_all (component, glyphs); }
/* Closure: the ligature glyph becomes reachable only if all components are. */
742 void closure (hb_closure_context_t *c) const
744 if (!intersects (c->glyphs)) return;
745 c->output->add (ligGlyph);
748 void collect_glyphs (hb_collect_glyphs_context_t *c) const
750 c->input->add_array (component.arrayZ, component.get_length ());
751 c->output->add (ligGlyph);
/* Sequence length must equal component count; components are compared starting
 * from the SECOND glyph (the first was matched via coverage by the caller). */
754 bool would_apply (hb_would_apply_context_t *c) const
756 if (c->len != component.lenP1)
759 for (unsigned int i = 1; i < c->len; i++)
760 if (likely (c->glyphs[i] != component[i]))
/* Match the trailing components in the buffer and, on success, ligate. */
766 bool apply (hb_ot_apply_context_t *c) const
769 unsigned int count = component.lenP1;
771 if (unlikely (!count)) return_trace (false);
773 /* Special-case to make it in-place and not consider this
774 * as a "ligated" substitution. */
775 if (unlikely (count == 1))
777 c->replace_glyph (ligGlyph);
781 unsigned int total_component_count = 0;
783 unsigned int match_length = 0;
784 unsigned int match_positions[HB_MAX_CONTEXT_LENGTH];
786 if (likely (!match_input (c, count,
792 &total_component_count)))
793 return_trace (false);
800 total_component_count);
/* Serialize one ligature: target glyph plus its components from the second on. */
805 template <typename Iterator,
806 hb_requires (hb_is_source_of (Iterator, hb_codepoint_t))>
807 bool serialize (hb_serialize_context_t *c,
808 hb_codepoint_t ligature,
809 Iterator components /* Starting from second */)
811 TRACE_SERIALIZE (this);
812 if (unlikely (!c->extend_min (*this))) return_trace (false);
814 if (unlikely (!component.serialize (c, components))) return_trace (false);
/* Subset: drop the ligature unless the output glyph AND every component survive. */
818 bool subset (hb_subset_context_t *c) const
821 const hb_set_t &glyphset = *c->plan->glyphset ();
822 const hb_map_t &glyph_map = *c->plan->glyph_map;
824 if (!intersects (&glyphset) || !glyphset.has (ligGlyph)) return_trace (false);
827 + hb_iter (component)
831 auto *out = c->serializer->start_embed (*this);
832 return_trace (out->serialize (c->serializer,
838 bool sanitize (hb_sanitize_context_t *c) const
840 TRACE_SANITIZE (this);
841 return_trace (ligGlyph.sanitize (c) && component.sanitize (c));
845 HBGlyphID ligGlyph; /* GlyphID of ligature to substitute */
846 HeadlessArrayOf<HBGlyphID>
847 component; /* Array of component GlyphIDs--start
848 * with the second component--ordered
849 * in writing direction */
851 DEFINE_SIZE_ARRAY (4, component);
/* NOTE(review): enclosing struct header (a LigatureSet: all ligatures sharing a
 * first glyph, per its use in LigatureSubstFormat1 below) is outside this extract. */
856 bool intersects (const hb_set_t *glyphs) const
860 | hb_map (hb_add (this))
861 | hb_map ([glyphs] (const Ligature &_) { return _.intersects (glyphs); })
/* Forward closure/collection to every Ligature in the set. */
866 void closure (hb_closure_context_t *c) const
869 | hb_map (hb_add (this))
870 | hb_apply ([c] (const Ligature &_) { _.closure (c); })
874 void collect_glyphs (hb_collect_glyphs_context_t *c) const
877 | hb_map (hb_add (this))
878 | hb_apply ([c] (const Ligature &_) { _.collect_glyphs (c); })
882 bool would_apply (hb_would_apply_context_t *c) const
886 | hb_map (hb_add (this))
887 | hb_map ([c] (const Ligature &_) { return _.would_apply (c); })
/* Try each ligature in table order (preference order); first match wins. */
892 bool apply (hb_ot_apply_context_t *c) const
895 unsigned int num_ligs = ligature.len;
896 for (unsigned int i = 0; i < num_ligs; i++)
898 const Ligature &lig = this+ligature[i];
899 if (lig.apply (c)) return_trace (true);
902 return_trace (false);
/* Serialize: component_count_list counts INCLUDE the first glyph, so each
 * ligature consumes (count - 1) glyphs from component_list. */
905 bool serialize (hb_serialize_context_t *c,
906 hb_array_t<const HBGlyphID> ligatures,
907 hb_array_t<const unsigned int> component_count_list,
908 hb_array_t<const HBGlyphID> &component_list /* Starting from second for each ligature */)
910 TRACE_SERIALIZE (this);
911 if (unlikely (!c->extend_min (*this))) return_trace (false);
912 if (unlikely (!ligature.serialize (c, ligatures.length))) return_trace (false);
913 for (unsigned int i = 0; i < ligatures.length; i++)
915 unsigned int component_count = (unsigned) hb_max ((int) component_count_list[i] - 1, 0);
916 if (unlikely (!ligature[i].serialize (c, this)
919 component_list.sub_array (0, component_count))))
920 return_trace (false);
921 component_list += component_count;
/* Subset: keep only ligatures that themselves subset successfully. */
926 bool subset (hb_subset_context_t *c) const
929 auto *out = c->serializer->start_embed (*this);
930 if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
933 | hb_filter (subset_offset_array (c, out->ligature, this))
936 return_trace (bool (out->ligature));
939 bool sanitize (hb_sanitize_context_t *c) const
941 TRACE_SANITIZE (this);
942 return_trace (ligature.sanitize (c, this));
946 OffsetArrayOf<Ligature>
947 ligature; /* Array LigatureSet tables
948 * ordered by preference */
950 DEFINE_SIZE_ARRAY (2, ligature);
/* Ligature substitution, format 1: coverage of FIRST glyphs; each covered glyph
 * maps to a LigatureSet of candidate ligatures starting with it. */
953 struct LigatureSubstFormat1
955 bool intersects (const hb_set_t *glyphs) const
958 + hb_zip (this+coverage, ligatureSet)
959 | hb_filter (*glyphs, hb_first)
961 | hb_map ([this, glyphs] (const OffsetTo<LigatureSet> &_)
962 { return (this+_).intersects (glyphs); })
/* Closure: forward to the LigatureSet of each covered-and-reachable first glyph. */
967 void closure (hb_closure_context_t *c) const
969 + hb_zip (this+coverage, ligatureSet)
970 | hb_filter (*c->glyphs, hb_first)
972 | hb_map (hb_add (this))
973 | hb_apply ([c] (const LigatureSet &_) { _.closure (c); })
977 void closure_lookups (hb_closure_lookups_context_t *c) const {}
979 void collect_glyphs (hb_collect_glyphs_context_t *c) const
981 if (unlikely (!(this+coverage).collect_coverage (c->input))) return;
983 + hb_zip (this+coverage, ligatureSet)
985 | hb_map (hb_add (this))
986 | hb_apply ([c] (const LigatureSet &_) { _.collect_glyphs (c); })
990 const Coverage &get_coverage () const { return this+coverage; }
/* Coverage checks the first glyph; the LigatureSet checks the rest. */
992 bool would_apply (hb_would_apply_context_t *c) const
994 unsigned int index = (this+coverage).get_coverage (c->glyphs[0]);
995 if (likely (index == NOT_COVERED)) return false;
997 const LigatureSet &lig_set = this+ligatureSet[index];
998 return lig_set.would_apply (c);
1001 bool apply (hb_ot_apply_context_t *c) const
1005 unsigned int index = (this+coverage).get_coverage (c->buffer->cur ().codepoint);
1006 if (likely (index == NOT_COVERED)) return_trace (false);
1008 const LigatureSet &lig_set = this+ligatureSet[index];
1009 return_trace (lig_set.apply (c));
/* Serialize: ligature_per_first_glyph_count_list[i] ligatures (and their
 * component slices) are consumed for the i-th first glyph. */
1012 bool serialize (hb_serialize_context_t *c,
1013 hb_sorted_array_t<const HBGlyphID> first_glyphs,
1014 hb_array_t<const unsigned int> ligature_per_first_glyph_count_list,
1015 hb_array_t<const HBGlyphID> ligatures_list,
1016 hb_array_t<const unsigned int> component_count_list,
1017 hb_array_t<const HBGlyphID> component_list /* Starting from second for each ligature */)
1019 TRACE_SERIALIZE (this);
1020 if (unlikely (!c->extend_min (*this))) return_trace (false);
1021 if (unlikely (!ligatureSet.serialize (c, first_glyphs.length))) return_trace (false);
1022 for (unsigned int i = 0; i < first_glyphs.length; i++)
1024 unsigned int ligature_count = ligature_per_first_glyph_count_list[i];
1025 if (unlikely (!ligatureSet[i].serialize (c, this)
1027 ligatures_list.sub_array (0, ligature_count),
1028 component_count_list.sub_array (0, ligature_count),
1029 component_list))) return_trace (false);
1030 ligatures_list += ligature_count;
1031 component_count_list += ligature_count;
1033 return_trace (coverage.serialize (c, this).serialize (c, first_glyphs));
/* Subset: keep first glyphs whose LigatureSet subsets successfully; rebuild coverage. */
1036 bool subset (hb_subset_context_t *c) const
1038 TRACE_SUBSET (this);
1039 const hb_set_t &glyphset = *c->plan->glyphset ();
1040 const hb_map_t &glyph_map = *c->plan->glyph_map;
1042 auto *out = c->serializer->start_embed (*this);
1043 if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
1044 out->format = format;
1046 hb_sorted_vector_t<hb_codepoint_t> new_coverage;
1047 + hb_zip (this+coverage, ligatureSet)
1048 | hb_filter (glyphset, hb_first)
1049 | hb_filter (subset_offset_array (c, out->ligatureSet, this), hb_second)
1051 | hb_map (glyph_map)
1052 | hb_sink (new_coverage)
1054 out->coverage.serialize (c->serializer, out)
1055 .serialize (c->serializer, new_coverage.iter ());
1056 return_trace (bool (new_coverage));
1059 bool sanitize (hb_sanitize_context_t *c) const
1061 TRACE_SANITIZE (this);
1062 return_trace (coverage.sanitize (c, this) && ligatureSet.sanitize (c, this));
1066 HBUINT16 format; /* Format identifier--format = 1 */
1068 coverage; /* Offset to Coverage table--from
1069 * beginning of Substitution table */
1070 OffsetArrayOf<LigatureSet>
1071 ligatureSet; /* Array LigatureSet tables
1072 * ordered by Coverage Index */
1074 DEFINE_SIZE_ARRAY (6, ligatureSet);
/* Top-level LigatureSubst: format-dispatching wrapper (only format 1 exists). */
1077 struct LigatureSubst
1079 bool serialize (hb_serialize_context_t *c,
1080 hb_sorted_array_t<const HBGlyphID> first_glyphs,
1081 hb_array_t<const unsigned int> ligature_per_first_glyph_count_list,
1082 hb_array_t<const HBGlyphID> ligatures_list,
1083 hb_array_t<const unsigned int> component_count_list,
1084 hb_array_t<const HBGlyphID> component_list /* Starting from second for each ligature */)
1086 TRACE_SERIALIZE (this);
1087 if (unlikely (!c->extend_min (u.format))) return_trace (false);
1088 unsigned int format = 1;
1091 case 1: return_trace (u.format1.serialize (c,
1093 ligature_per_first_glyph_count_list,
1095 component_count_list,
1097 default:return_trace (false);
/* Dispatch on the on-disk format tag. */
1101 template <typename context_t, typename ...Ts>
1102 typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
1104 TRACE_DISPATCH (this, u.format);
1105 if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
1107 case 1: return_trace (c->dispatch (u.format1, hb_forward<Ts> (ds)...));
1108 default:return_trace (c->default_return_value ());
1114 HBUINT16 format; /* Format identifier */
1115 LigatureSubstFormat1 format1;
/* Contextual substitutions reuse the shared (G)SUB/GPOS Context machinery. */
1120 struct ContextSubst : Context {};
1122 struct ChainContextSubst : ChainContext {};
/* Extension lookup: indirection wrapper around another GSUB subtable type. */
1124 struct ExtensionSubst : Extension<ExtensionSubst>
1126 typedef struct SubstLookupSubTable SubTable;
/* Defined out-of-line; reports whether the wrapped subtable is reverse-chaining. */
1127 bool is_reverse () const;
1131 struct ReverseChainSingleSubstFormat1
1133 bool intersects (const hb_set_t *glyphs) const
1135 if (!(this+coverage).intersects (glyphs))
1138 const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage>> (backtrack);
1142 count = backtrack.len;
1143 for (unsigned int i = 0; i < count; i++)
1144 if (!(this+backtrack[i]).intersects (glyphs))
1147 count = lookahead.len;
1148 for (unsigned int i = 0; i < count; i++)
1149 if (!(this+lookahead[i]).intersects (glyphs))
1155 void closure (hb_closure_context_t *c) const
1157 if (!intersects (c->glyphs)) return;
1159 const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage>> (backtrack);
1160 const ArrayOf<HBGlyphID> &substitute = StructAfter<ArrayOf<HBGlyphID>> (lookahead);
1162 + hb_zip (this+coverage, substitute)
1163 | hb_filter (*c->glyphs, hb_first)
1164 | hb_map (hb_second)
1165 | hb_sink (c->output)
1169 void closure_lookups (hb_closure_lookups_context_t *c) const {}
1171 void collect_glyphs (hb_collect_glyphs_context_t *c) const
1173 if (unlikely (!(this+coverage).collect_coverage (c->input))) return;
1177 count = backtrack.len;
1178 for (unsigned int i = 0; i < count; i++)
1179 if (unlikely (!(this+backtrack[i]).collect_coverage (c->before))) return;
1181 const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage>> (backtrack);
1182 count = lookahead.len;
1183 for (unsigned int i = 0; i < count; i++)
1184 if (unlikely (!(this+lookahead[i]).collect_coverage (c->after))) return;
1186 const ArrayOf<HBGlyphID> &substitute = StructAfter<ArrayOf<HBGlyphID>> (lookahead);
1187 count = substitute.len;
1188 c->output->add_array (substitute.arrayZ, substitute.len);
1191 const Coverage &get_coverage () const { return this+coverage; }
1193 bool would_apply (hb_would_apply_context_t *c) const
1194 { return c->len == 1 && (this+coverage).get_coverage (c->glyphs[0]) != NOT_COVERED; }
/* Apply the reverse-chain substitution at the current buffer position:
 * match backtrack and lookahead coverage contexts, then replace the
 * current glyph in place with its substitute.  Returns true on match. */
1196 bool apply (hb_ot_apply_context_t *c) const
/* This lookup type must be outermost: if we are inside a recursed
 * (chained) lookup, nesting_level_left has been decremented — bail. */
1199 if (unlikely (c->nesting_level_left != HB_MAX_NESTING_LEVEL))
1200 return_trace (false); /* No chaining to this type */
1202 unsigned int index = (this+coverage).get_coverage (c->buffer->cur ().codepoint);
1203 if (likely (index == NOT_COVERED)) return_trace (false);
/* Variable-length trailing arrays: lookahead follows backtrack,
 * substitute follows lookahead. */
1205 const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage>> (backtrack);
1206 const ArrayOf<HBGlyphID> &substitute = StructAfter<ArrayOf<HBGlyphID>> (lookahead);
1208 unsigned int start_index = 0, end_index = 0;
/* Context glyphs are matched against Coverage tables (match_coverage),
 * the coverage offsets being relative to `this`. */
1209 if (match_backtrack (c,
1210 backtrack.len, (HBUINT16 *) backtrack.arrayZ,
1211 match_coverage, this,
1214 lookahead.len, (HBUINT16 *) lookahead.arrayZ,
1215 match_coverage, this,
/* Matched: forbid breaking across the matched context, then substitute
 * using the coverage index to pick the replacement glyph. */
1218 c->buffer->unsafe_to_break_from_outbuffer (start_index, end_index);
1219 c->replace_glyph_inplace (substitute[index]);
1220 /* Note: We DON'T decrease buffer->idx. The main loop does it
1221 * for us. This is useful for preventing surprises if someone
1222 * calls us through a Context lookup. */
1223 return_trace (true);
1226 return_trace (false);
/* Subsetting of this subtable format is not implemented: always returns
 * false, so the subtable is dropped from subset output. */
1229 bool subset (hb_subset_context_t *c) const
1231 TRACE_SUBSET (this);
1233 return_trace (false);
/* Validate the subtable: coverage and backtrack first, then the two
 * trailing variable-length arrays located with StructAfter.  Each array
 * must be sanitized before the next can be located, since its length
 * determines where the following one starts. */
1236 bool sanitize (hb_sanitize_context_t *c) const
1238 TRACE_SANITIZE (this);
1239 if (!(coverage.sanitize (c, this) && backtrack.sanitize (c, this)))
1240 return_trace (false);
1241 const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage>> (backtrack);
1242 if (!lookahead.sanitize (c, this))
1243 return_trace (false);
1244 const ArrayOf<HBGlyphID> &substitute = StructAfter<ArrayOf<HBGlyphID>> (lookahead);
/* substitute holds plain GlyphIDs (no offsets), so no base needed. */
1245 return_trace (substitute.sanitize (c));
/* Binary layout of ReverseChainSingleSubstFormat1.  Only `format`,
 * `coverage` and `backtrack` are at fixed offsets; `lookaheadX` and
 * `substituteX` (note the X suffix) are unaddressable placeholders for
 * the variable-length arrays reached via StructAfter above. */
1249 HBUINT16 format; /* Format identifier--format = 1 */
1251 coverage; /* Offset to Coverage table--from
1252 * beginning of table */
1253 OffsetArrayOf<Coverage>
1254 backtrack; /* Array of coverage tables
1255 * in backtracking sequence, in glyph
1257 OffsetArrayOf<Coverage>
1258 lookaheadX; /* Array of coverage tables
1259 * in lookahead sequence, in glyph
1262 substituteX; /* Array of substitute
1263 * GlyphIDs--ordered by Coverage Index */
1265 DEFINE_SIZE_MIN (10);
/* Format-dispatching wrapper for reverse-chain single substitution
 * (GSUB LookupType 8).  Only format 1 is defined by the spec. */
1268 struct ReverseChainSingleSubst
1270 template <typename context_t, typename ...Ts>
1271 typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
1273 TRACE_DISPATCH (this, u.format);
/* may_dispatch sanitizes the format field before we switch on it. */
1274 if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
1276 case 1: return_trace (c->dispatch (u.format1, hb_forward<Ts> (ds)...));
1277 default:return_trace (c->default_return_value ());
1283 HBUINT16 format; /* Format identifier */
1284 ReverseChainSingleSubstFormat1 format1;
/* Union of all GSUB subtable types; dispatch() routes a context object
 * to the member selected by the lookup type. */
1294 struct SubstLookupSubTable
1296 friend struct Lookup;
1297 friend struct SubstLookup;
1307 ReverseChainSingle = 8
1310 template <typename context_t, typename ...Ts>
1311 typename context_t::return_t dispatch (context_t *c, unsigned int lookup_type, Ts&&... ds) const
1313 TRACE_DISPATCH (this, lookup_type);
1314 switch (lookup_type) {
1315 case Single: return_trace (u.single.dispatch (c, hb_forward<Ts> (ds)...));
1316 case Multiple: return_trace (u.multiple.dispatch (c, hb_forward<Ts> (ds)...));
1317 case Alternate: return_trace (u.alternate.dispatch (c, hb_forward<Ts> (ds)...));
1318 case Ligature: return_trace (u.ligature.dispatch (c, hb_forward<Ts> (ds)...));
1319 case Context: return_trace (u.context.dispatch (c, hb_forward<Ts> (ds)...));
1320 case ChainContext: return_trace (u.chainContext.dispatch (c, hb_forward<Ts> (ds)...));
1321 case Extension: return_trace (u.extension.dispatch (c, hb_forward<Ts> (ds)...));
1322 case ReverseChainSingle: return_trace (u.reverseChainContextSingle.dispatch (c, hb_forward<Ts> (ds)...));
1323 default: return_trace (c->default_return_value ());
/* True if any glyph covered by this subtable is in `glyphs`. */
1327 bool intersects (const hb_set_t *glyphs, unsigned int lookup_type) const
1329 hb_intersects_context_t c (glyphs);
1330 return dispatch (&c, lookup_type);
1336 MultipleSubst multiple;
1337 AlternateSubst alternate;
1338 LigatureSubst ligature;
1339 ContextSubst context;
1340 ChainContextSubst chainContext;
1341 ExtensionSubst extension;
1342 ReverseChainSingleSubst reverseChainContextSingle;
1345 DEFINE_SIZE_MIN (0);
/* A GSUB lookup: a typed view over the generic Lookup that dispatches
 * every operation (apply, closure, collect, serialize, subset, sanitize)
 * through SubstLookupSubTable. */
1349 struct SubstLookup : Lookup
1351 typedef SubstLookupSubTable SubTable;
1353 const SubTable& get_subtable (unsigned int i) const
1354 { return Lookup::get_subtable<SubTable> (i); }
/* Reverse-chain lookups are applied back-to-front by the shaper. */
1356 static inline bool lookup_type_is_reverse (unsigned int lookup_type)
1357 { return lookup_type == SubTable::ReverseChainSingle; }
1359 bool is_reverse () const
1361 unsigned int type = get_type ();
/* Extension lookups wrap another type; peek at the first subtable
 * to learn the wrapped type's direction. */
1362 if (unlikely (type == SubTable::Extension))
1363 return reinterpret_cast<const ExtensionSubst &> (get_subtable (0)).is_reverse ();
1364 return lookup_type_is_reverse (type);
1367 bool apply (hb_ot_apply_context_t *c) const
1370 return_trace (dispatch (c));
1373 bool intersects (const hb_set_t *glyphs) const
1375 hb_intersects_context_t c (glyphs);
1376 return dispatch (&c);
/* Glyph closure: expand c->glyphs with everything this lookup can
 * produce; recursion into contextual sub-lookups goes through
 * dispatch_closure_recurse_func. */
1379 hb_closure_context_t::return_t closure (hb_closure_context_t *c, unsigned int this_index) const
1381 if (!c->should_visit_lookup (this_index))
1382 return hb_closure_context_t::default_return_value ();
1384 c->set_recurse_func (dispatch_closure_recurse_func);
1386 hb_closure_context_t::return_t ret = dispatch (c);
/* Lookup closure: mark this lookup visited, prune it if it cannot
 * match any glyph in c->glyphs, else recurse into referenced lookups. */
1393 hb_closure_lookups_context_t::return_t closure_lookups (hb_closure_lookups_context_t *c, unsigned this_index) const
1395 if (c->is_lookup_visited (this_index))
1396 return hb_closure_lookups_context_t::default_return_value ();
1398 c->set_lookup_visited (this_index);
1399 if (!intersects (c->glyphs))
1401 c->set_lookup_inactive (this_index);
1402 return hb_closure_lookups_context_t::default_return_value ();
1405 c->set_recurse_func (dispatch_closure_lookups_recurse_func);
1407 hb_closure_lookups_context_t::return_t ret = dispatch (c);
1411 hb_collect_glyphs_context_t::return_t collect_glyphs (hb_collect_glyphs_context_t *c) const
1413 c->set_recurse_func (dispatch_recurse_func<hb_collect_glyphs_context_t>);
1414 return dispatch (c);
/* Gather the union of all subtables' input coverage into `glyphs`. */
1417 template <typename set_t>
1418 void collect_coverage (set_t *glyphs) const
1420 hb_collect_coverage_context_t<set_t> c (glyphs);
/* Cheap pre-check via the accelerator's glyph filter before the full
 * per-subtable would_apply dispatch. */
1424 bool would_apply (hb_would_apply_context_t *c,
1425 const hb_ot_layout_lookup_accelerator_t *accel) const
1427 if (unlikely (!c->len)) return false;
1428 if (!accel->may_have (c->glyphs[0])) return false;
1429 return dispatch (c);
1432 static inline bool apply_recurse_func (hb_ot_apply_context_t *c, unsigned int lookup_index);
1434 SubTable& serialize_subtable (hb_serialize_context_t *c,
1436 { return get_subtables<SubTable> ()[i].serialize (c, this); }
/* serialize_* helpers: write a one-subtable lookup of the named type.
 * Each first serializes the Lookup header, then fills subtable 0. */
1438 bool serialize_single (hb_serialize_context_t *c,
1439 uint32_t lookup_props,
1440 hb_sorted_array_t<const HBGlyphID> glyphs,
1441 hb_array_t<const HBGlyphID> substitutes)
1443 TRACE_SERIALIZE (this);
1444 if (unlikely (!Lookup::serialize (c, SubTable::Single, lookup_props, 1))) return_trace (false);
1445 return_trace (serialize_subtable (c, 0).u.single.
1446 serialize (c, hb_zip (glyphs, substitutes)));
1449 bool serialize_multiple (hb_serialize_context_t *c,
1450 uint32_t lookup_props,
1451 hb_sorted_array_t<const HBGlyphID> glyphs,
1452 hb_array_t<const unsigned int> substitute_len_list,
1453 hb_array_t<const HBGlyphID> substitute_glyphs_list)
1455 TRACE_SERIALIZE (this);
1456 if (unlikely (!Lookup::serialize (c, SubTable::Multiple, lookup_props, 1))) return_trace (false);
1457 return_trace (serialize_subtable (c, 0).u.multiple.
1460 substitute_len_list,
1461 substitute_glyphs_list));
1464 bool serialize_alternate (hb_serialize_context_t *c,
1465 uint32_t lookup_props,
1466 hb_sorted_array_t<const HBGlyphID> glyphs,
1467 hb_array_t<const unsigned int> alternate_len_list,
1468 hb_array_t<const HBGlyphID> alternate_glyphs_list)
1470 TRACE_SERIALIZE (this);
1471 if (unlikely (!Lookup::serialize (c, SubTable::Alternate, lookup_props, 1))) return_trace (false);
1472 return_trace (serialize_subtable (c, 0).u.alternate.
1476 alternate_glyphs_list));
1479 bool serialize_ligature (hb_serialize_context_t *c,
1480 uint32_t lookup_props,
1481 hb_sorted_array_t<const HBGlyphID> first_glyphs,
1482 hb_array_t<const unsigned int> ligature_per_first_glyph_count_list,
1483 hb_array_t<const HBGlyphID> ligatures_list,
1484 hb_array_t<const unsigned int> component_count_list,
1485 hb_array_t<const HBGlyphID> component_list /* Starting from second for each ligature */)
1487 TRACE_SERIALIZE (this);
1488 if (unlikely (!Lookup::serialize (c, SubTable::Ligature, lookup_props, 1))) return_trace (false);
1489 return_trace (serialize_subtable (c, 0).u.ligature.
1492 ligature_per_first_glyph_count_list,
1494 component_count_list,
/* Recursion trampolines; implemented out-of-class (below) because they
 * need the full GSUB accelerator type. */
1498 template <typename context_t>
1499 static inline typename context_t::return_t dispatch_recurse_func (context_t *c, unsigned int lookup_index);
1501 static inline hb_closure_context_t::return_t dispatch_closure_recurse_func (hb_closure_context_t *c, unsigned int lookup_index)
1503 if (!c->should_visit_lookup (lookup_index))
1504 return hb_empty_t ();
1506 hb_closure_context_t::return_t ret = dispatch_recurse_func (c, lookup_index);
1508 /* While in theory we should flush here, it will cause timeouts because a recursive
1509 * lookup can keep growing the glyph set. Skip, and outer loop will retry up to
1510 * HB_CLOSURE_MAX_STAGES times, which should be enough for every realistic font. */
1516 HB_INTERNAL static hb_closure_lookups_context_t::return_t dispatch_closure_lookups_recurse_func (hb_closure_lookups_context_t *c, unsigned lookup_index);
1518 template <typename context_t, typename ...Ts>
1519 typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
1520 { return Lookup::dispatch<SubTable> (c, hb_forward<Ts> (ds)...); }
1522 bool subset (hb_subset_context_t *c) const
1523 { return Lookup::subset<SubTable> (c); }
1525 bool sanitize (hb_sanitize_context_t *c) const
1526 { return Lookup::sanitize<SubTable> (c); }
1530 * GSUB -- Glyph Substitution
1531 * https://docs.microsoft.com/en-us/typography/opentype/spec/gsub
/* Top-level GSUB table object.  Shares header/script/feature machinery
 * with GPOS via the GSUBGPOS base; lookups are typed as SubstLookup. */
1534 struct GSUB : GSUBGPOS
1536 static constexpr hb_tag_t tableTag = HB_OT_TAG_GSUB;
1538 const SubstLookup& get_lookup (unsigned int i) const
1539 { return static_cast<const SubstLookup &> (GSUBGPOS::get_lookup (i)); }
/* Subset using the plan's precomputed GSUB lookup/feature maps. */
1541 bool subset (hb_subset_context_t *c) const
1543 hb_subset_layout_context_t l (c, tableTag, c->plan->gsub_lookups, c->plan->gsub_features);
1544 return GSUBGPOS::subset<SubstLookup> (&l);
1547 bool sanitize (hb_sanitize_context_t *c) const
1548 { return GSUBGPOS::sanitize<SubstLookup> (c); }
/* Font-specific quirk detection; defined in the corresponding .cc. */
1550 HB_INTERNAL bool is_blacklisted (hb_blob_t *blob,
1551 hb_face_t *face) const;
1553 typedef GSUBGPOS::accelerator_t<GSUB> accelerator_t;
/* Concrete accelerator type so the face table machinery can name it. */
1557 struct GSUB_accelerator_t : GSUB::accelerator_t {};
1560 /* Out-of-class implementation for methods recursing */
1562 #ifndef HB_NO_OT_LAYOUT
/* An Extension subtable is "reverse" iff the lookup type it wraps is. */
1563 /*static*/ inline bool ExtensionSubst::is_reverse () const
1565 return SubstLookup::lookup_type_is_reverse (get_type ());
/* Generic recursion: fetch the referenced lookup from the cached GSUB
 * accelerator (get_relaxed: table is known to be initialized by now)
 * and dispatch the same context into it. */
1567 template <typename context_t>
1568 /*static*/ typename context_t::return_t SubstLookup::dispatch_recurse_func (context_t *c, unsigned int lookup_index)
1570 const SubstLookup &l = c->face->table.GSUB.get_relaxed ()->table->get_lookup (lookup_index);
1571 return l.dispatch (c);
/* Lookup-closure recursion: route through closure_lookups (not plain
 * dispatch) so visited/inactive bookkeeping is applied to the target. */
1574 /*static*/ inline hb_closure_lookups_context_t::return_t SubstLookup::dispatch_closure_lookups_recurse_func (hb_closure_lookups_context_t *c, unsigned this_index)
1576 const SubstLookup &l = c->face->table.GSUB.get_relaxed ()->table->get_lookup (this_index);
1577 return l.closure_lookups (c, this_index);
/* Apply-time recursion: swap in the target lookup's index/props for the
 * nested dispatch, then restore the caller's so the outer lookup
 * continues with its own filtering state. */
1580 /*static*/ bool SubstLookup::apply_recurse_func (hb_ot_apply_context_t *c, unsigned int lookup_index)
1582 const SubstLookup &l = c->face->table.GSUB.get_relaxed ()->table->get_lookup (lookup_index);
1583 unsigned int saved_lookup_props = c->lookup_props;
1584 unsigned int saved_lookup_index = c->lookup_index;
1585 c->set_lookup_index (lookup_index);
1586 c->set_lookup_props (l.get_props ());
1587 bool ret = l.dispatch (c);
1588 c->set_lookup_index (saved_lookup_index);
1589 c->set_lookup_props (saved_lookup_props);
1595 } /* namespace OT */
1598 #endif /* HB_OT_LAYOUT_GSUB_TABLE_HH */