2 * Copyright © 2007,2008,2009,2010 Red Hat, Inc.
3 * Copyright © 2010,2012 Google, Inc.
5 * This is part of HarfBuzz, a text shaping library.
7 * Permission is hereby granted, without written agreement and without
8 * license or royalty fees, to use, copy, modify, and distribute this
9 * software and its documentation for any purpose, provided that the
10 * above copyright notice and the following two paragraphs appear in
11 * all copies of this software.
13 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
14 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
15 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
16 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
19 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
20 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
21 * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
22 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
23 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
25 * Red Hat Author(s): Behdad Esfahbod
26 * Google Author(s): Behdad Esfahbod
29 #ifndef HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH
30 #define HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH
32 #include "hb-buffer-private.hh"
33 #include "hb-ot-layout-gdef-table.hh"
34 #include "hb-set-private.hh"
/* Debug level for the closure pass; defaults to the global HB_DEBUG level. */
38 #ifndef HB_DEBUG_CLOSURE
39 #define HB_DEBUG_CLOSURE (HB_DEBUG+0)
/* Emits a scoped trace record when entering a closure() method; relies on a
 * local variable `c' (the hb_closure_context_t) being in scope. */
42 #define TRACE_CLOSURE() \
43 hb_auto_trace_t<HB_DEBUG_CLOSURE> trace (&c->debug_depth, "CLOSURE", this, HB_FUNC, "");
/* Context object threaded through closure() calls: computes the transitive
 * set of glyphs reachable via substitution, with a recursion budget.
 * NOTE(review): this extract is missing lines (no visible '{', no glyph-set
 * member, constructor tail absent) -- struct is incomplete as shown. */
46 struct hb_closure_context_t
50 unsigned int nesting_level_left;
51 unsigned int debug_depth;
/* Constructor; recursion budget defaults to MAX_NESTING_LEVEL. */
54 hb_closure_context_t (hb_face_t *face_,
56 unsigned int nesting_level_left_ = MAX_NESTING_LEVEL) :
59 nesting_level_left (nesting_level_left_),
65 /* TODO Add TRACE_RETURN annotation to gsub. */
/* Debug level for would_apply() probing; defaults to the global HB_DEBUG. */
66 #ifndef HB_DEBUG_WOULD_APPLY
67 #define HB_DEBUG_WOULD_APPLY (HB_DEBUG+0)
/* Scoped trace for would_apply() entry points; logs the glyph count. */
70 #define TRACE_WOULD_APPLY() \
71 hb_auto_trace_t<HB_DEBUG_WOULD_APPLY> trace (&c->debug_depth, "WOULD_APPLY", this, HB_FUNC, "%d glyphs", c->len);
/* Context for the would_apply() query: tests whether a lookup *would* apply
 * to a fixed, caller-supplied glyph sequence, without mutating any buffer.
 * NOTE(review): extract is gapped -- '{', a `len' member (referenced by
 * TRACE_WOULD_APPLY) and the constructor tail are not visible here. */
74 struct hb_would_apply_context_t
77 const hb_codepoint_t *glyphs;
79 const hb_set_digest_t digest;
80 unsigned int debug_depth;
/* Constructor (parameter list continues on lines missing from this view). */
82 hb_would_apply_context_t (hb_face_t *face_,
83 const hb_codepoint_t *glyphs_,
85 const hb_set_digest_t *digest_
/* Debug level for the apply pass; defaults to the global HB_DEBUG level. */
95 #ifndef HB_DEBUG_APPLY
96 #define HB_DEBUG_APPLY (HB_DEBUG+0)
/* Scoped trace for apply() entry points; logs buffer index and codepoint. */
99 #define TRACE_APPLY() \
100 hb_auto_trace_t<HB_DEBUG_APPLY> trace (&c->debug_depth, "APPLY", this, HB_FUNC, "idx %d codepoint %u", c->buffer->idx, c->buffer->cur().codepoint);
/* Context object for actually applying lookups to a buffer.  Carries the
 * font/face/buffer, the active lookup's mask and props, and helpers for
 * skipping "ignored" glyphs (marks) per the LookupFlag rules.
 * NOTE(review): this extract is gapped -- braces, some members (font, face,
 * buffer, gdef) and several statement lines are not visible here. */
103 struct hb_apply_context_t
108 hb_direction_t direction;
109 hb_mask_t lookup_mask;
110 unsigned int nesting_level_left;
111 unsigned int lookup_props;
112 unsigned int property; /* property of first glyph */
113 unsigned int debug_depth;
115 bool has_glyph_classes;
116 const hb_set_digest_t digest;
/* Constructor: snapshots direction from the buffer and caches whether the
 * GDEF table carries glyph classes (used by set_class below). */
119 hb_apply_context_t (hb_font_t *font_,
120 hb_buffer_t *buffer_,
121 hb_mask_t lookup_mask_,
122 const hb_set_digest_t *digest_) :
123 font (font_), face (font->face), buffer (buffer_),
124 direction (buffer_->props.direction),
125 lookup_mask (lookup_mask_),
126 nesting_level_left (MAX_NESTING_LEVEL),
127 lookup_props (0), property (0), debug_depth (0),
128 gdef (*hb_ot_layout_from_face (face)->gdef),
129 has_glyph_classes (gdef.has_glyph_classes ()),
/* Installs the per-lookup flags (LookupFlag word) before applying it. */
132 void set_lookup (const Lookup &l) {
133 lookup_props = l.get_props ();
/* Forward iterator over buffer->info[] that transparently skips glyphs
 * rejected by should_skip_mark(), and filters by mask and syllable.
 * context_match=true relaxes mask/syllable filtering (used when matching
 * backtrack/lookahead context rather than the input sequence). */
136 struct mark_skipping_forward_iterator_t
138 inline mark_skipping_forward_iterator_t (hb_apply_context_t *c_,
139 unsigned int start_index_,
140 unsigned int num_items_,
141 bool context_match = false)
145 num_items = num_items_;
146 mask = context_match ? -1 : c->lookup_mask;
147 syllable = context_match ? 0 : c->buffer->cur().syllable ();
148 end = c->buffer->len;
/* True when fewer than num_items glyphs remain ahead: match cannot succeed. */
150 inline bool has_no_chance (void) const
152 return unlikely (num_items && idx + num_items >= end);
154 inline void reject (void)
/* Advances to the next non-skipped glyph; stores its glyph props in
 * *property_out and checks it against mask/syllable. */
158 inline bool next (unsigned int *property_out,
159 unsigned int lookup_props)
161 assert (num_items > 0);
164 if (has_no_chance ())
167 } while (c->should_skip_mark (&c->buffer->info[idx], lookup_props, property_out));
169 return (c->buffer->info[idx].mask & mask) && (!syllable || syllable == c->buffer->info[idx].syllable ());
/* Convenience overload using the context's current lookup_props. */
171 inline bool next (unsigned int *property_out = NULL)
173 return next (property_out, c->lookup_props);
178 hb_apply_context_t *c;
179 unsigned int num_items;
/* Backward counterpart iterating buffer->out_info[] (already-output glyphs);
 * used for backtrack matching. */
185 struct mark_skipping_backward_iterator_t
187 inline mark_skipping_backward_iterator_t (hb_apply_context_t *c_,
188 unsigned int start_index_,
189 unsigned int num_items_,
191 bool match_syllable_ = true)
195 num_items = num_items_;
196 mask = mask_ ? mask_ : c->lookup_mask;
197 syllable = match_syllable_ ? c->buffer->cur().syllable () : 0;
/* True when fewer than num_items glyphs remain behind the cursor. */
199 inline bool has_no_chance (void) const
201 return unlikely (idx < num_items);
203 inline void reject (void)
/* Steps back to the previous non-skipped glyph; mirror of next() above. */
207 inline bool prev (unsigned int *property_out,
208 unsigned int lookup_props)
210 assert (num_items > 0);
213 if (has_no_chance ())
216 } while (c->should_skip_mark (&c->buffer->out_info[idx], lookup_props, property_out));
218 return (c->buffer->out_info[idx].mask & mask) && (!syllable || syllable == c->buffer->out_info[idx].syllable ());
220 inline bool prev (unsigned int *property_out = NULL)
222 return prev (property_out, c->lookup_props);
227 hb_apply_context_t *c;
228 unsigned int num_items;
/* Mark-specific property check: honors UseMarkFilteringSet (set index in the
 * high short of lookup_props) and MarkAttachmentType filtering. */
234 match_properties_mark (hb_codepoint_t glyph,
235 unsigned int glyph_props,
236 unsigned int lookup_props) const
238 /* If using mark filtering sets, the high short of
239 * lookup_props has the set index.
241 if (lookup_props & LookupFlag::UseMarkFilteringSet)
242 return gdef.mark_set_covers (lookup_props >> 16, glyph);
244 /* The second byte of lookup_props has the meaning
245 * "ignore marks of attachment type different than
246 * the attachment type specified."
248 if (lookup_props & LookupFlag::MarkAttachmentType)
249 return (lookup_props & LookupFlag::MarkAttachmentType) == (glyph_props & LookupFlag::MarkAttachmentType);
/* General property check: rejects glyphs whose class is in the lookup's
 * IgnoreFlags; defers to match_properties_mark() for marks. */
255 match_properties (hb_codepoint_t glyph,
256 unsigned int glyph_props,
257 unsigned int lookup_props) const
259 /* Not covered, if, for example, glyph class is ligature and
260 * lookup_props includes LookupFlag::IgnoreLigatures
262 if (glyph_props & lookup_props & LookupFlag::IgnoreFlags)
265 if (unlikely (glyph_props & HB_OT_LAYOUT_GLYPH_CLASS_MARK))
266 return match_properties_mark (glyph, glyph_props, lookup_props);
/* Checks one glyph against lookup_props; always writes its glyph props to
 * *property_out (assumed non-NULL here, unlike should_skip_mark). */
272 check_glyph_property (hb_glyph_info_t *info,
273 unsigned int lookup_props,
274 unsigned int *property_out) const
276 unsigned int property;
278 property = info->glyph_props();
279 *property_out = property;
281 return match_properties (info->codepoint, property, lookup_props);
/* Returns true if `info' is a mark that this lookup does not accept (and
 * should therefore be skipped); non-marks are never skipped here. */
285 should_skip_mark (hb_glyph_info_t *info,
286 unsigned int lookup_props,
287 unsigned int *property_out) const
289 unsigned int property;
291 property = info->glyph_props();
293 *property_out = property;
295 /* If it's a mark, skip it if we don't accept it. */
296 if (unlikely (property & HB_OT_LAYOUT_GLYPH_CLASS_MARK))
297 return !match_properties (info->codepoint, property, lookup_props);
299 /* If not a mark, don't skip. */
/* Shorthand: should the glyph under the cursor be skipped? */
304 inline bool should_mark_skip_current_glyph (void) const
306 return should_skip_mark (&buffer->cur(), lookup_props, NULL);
/* Updates the current glyph's GDEF class: from the real GDEF table when one
 * exists, otherwise from the caller's guess (if any). */
309 inline void set_class (hb_codepoint_t glyph_index, unsigned int class_guess) const
311 if (likely (has_glyph_classes))
312 buffer->cur().glyph_props() = gdef.get_glyph_props (glyph_index);
313 else if (class_guess)
314 buffer->cur().glyph_props() = class_guess;
/* Emit a new glyph (does not consume the current one). */
317 inline void output_glyph (hb_codepoint_t glyph_index,
318 unsigned int class_guess = 0) const
320 set_class (glyph_index, class_guess);
321 buffer->output_glyph (glyph_index);
/* Replace the current glyph and advance. */
323 inline void replace_glyph (hb_codepoint_t glyph_index,
324 unsigned int class_guess = 0) const
326 set_class (glyph_index, class_guess);
327 buffer->replace_glyph (glyph_index);
/* Replace the current glyph without advancing the cursor. */
329 inline void replace_glyph_inplace (hb_codepoint_t glyph_index,
330 unsigned int class_guess = 0) const
332 set_class (glyph_index, class_guess);
333 buffer->cur().codepoint = glyph_index;
/* Pluggable per-format callbacks: `intersects'/`match' test one position
 * against a USHORT value interpreted per format (glyph id, class, or
 * coverage offset); `closure'/`apply' recurse into a nested lookup. */
339 typedef bool (*intersects_func_t) (hb_set_t *glyphs, const USHORT &value, const void *data);
340 typedef bool (*match_func_t) (hb_codepoint_t glyph_id, const USHORT &value, const void *data);
341 typedef void (*closure_lookup_func_t) (hb_closure_context_t *c, unsigned int lookup_index);
342 typedef bool (*apply_lookup_func_t) (hb_apply_context_t *c, unsigned int lookup_index);
/* Callback pair used when computing closure over contextual lookups. */
344 struct ContextClosureFuncs
346 intersects_func_t intersects;
347 closure_lookup_func_t closure;
/* Callbacks used when applying contextual lookups.
 * NOTE(review): the `match' member line is missing from this extract. */
349 struct ContextApplyFuncs
352 apply_lookup_func_t apply;
/* Format-1 intersect: value is a glyph id; test set membership directly. */
355 static inline bool intersects_glyph (hb_set_t *glyphs, const USHORT &value, const void *data HB_UNUSED)
357 return glyphs->has (value);
/* Format-2 intersect: value is a class number; data points to the ClassDef. */
359 static inline bool intersects_class (hb_set_t *glyphs, const USHORT &value, const void *data)
361 const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
362 return class_def.intersects_class (glyphs, value);
/* Format-3 intersect: value is an offset to a Coverage table, relative to
 * `data' (the table base). */
364 static inline bool intersects_coverage (hb_set_t *glyphs, const USHORT &value, const void *data)
366 const OffsetTo<Coverage> &coverage = (const OffsetTo<Coverage>&)value;
367 return (data+coverage).intersects (glyphs);
/* True iff every value in values[0..count) intersects the closure glyph set
 * under intersects_func.  Fails fast on the first non-intersecting value. */
370 static inline bool intersects_array (hb_closure_context_t *c,
372 const USHORT values[],
373 intersects_func_t intersects_func,
374 const void *intersects_data)
376 for (unsigned int i = 0; i < count; i++)
377 if (likely (!intersects_func (c->glyphs, values[i], intersects_data)))
/* Format-1 match: exact glyph-id comparison. */
383 static inline bool match_glyph (hb_codepoint_t glyph_id, const USHORT &value, const void *data HB_UNUSED)
385 return glyph_id == value;
/* Format-2 match: glyph's class (from the ClassDef in `data') must equal value. */
387 static inline bool match_class (hb_codepoint_t glyph_id, const USHORT &value, const void *data)
389 const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
390 return class_def.get_class (glyph_id) == value;
/* Format-3 match: glyph must be covered by the Coverage table at offset
 * `value' from the base pointer `data'. */
392 static inline bool match_coverage (hb_codepoint_t glyph_id, const USHORT &value, const void *data)
394 const OffsetTo<Coverage> &coverage = (const OffsetTo<Coverage>&)value;
395 return (data+coverage).get_coverage (glyph_id) != NOT_COVERED;
/* would_apply variant of input matching: compares the caller-supplied glyph
 * array positionally (no buffer, no mark skipping).  Glyph 0 is assumed
 * already matched; input[] starts at the second glyph. */
399 static inline bool would_match_input (hb_would_apply_context_t *c,
400 unsigned int count, /* Including the first glyph (not matched) */
401 const USHORT input[], /* Array of input values--start with second glyph */
402 match_func_t match_func,
403 const void *match_data)
408 for (unsigned int i = 1; i < count; i++)
409 if (likely (!match_func (c->glyphs[i], input[i - 1], match_data)))
/* Matches the input sequence starting at the buffer cursor, skipping ignored
 * glyphs via the forward iterator.  On success *end_offset (if given) gets
 * the matched span's length in buffer positions (including skipped glyphs). */
414 static inline bool match_input (hb_apply_context_t *c,
415 unsigned int count, /* Including the first glyph (not matched) */
416 const USHORT input[], /* Array of input values--start with second glyph */
417 match_func_t match_func,
418 const void *match_data,
419 unsigned int *end_offset = NULL)
421 hb_apply_context_t::mark_skipping_forward_iterator_t skippy_iter (c, c->buffer->idx, count - 1);
422 if (skippy_iter.has_no_chance ())
/* First glyph (at idx) is already matched by the caller's coverage check. */
425 for (unsigned int i = 1; i < count; i++)
427 if (!skippy_iter.next ())
430 if (likely (!match_func (c->buffer->info[skippy_iter.idx].codepoint, input[i - 1], match_data)))
435 *end_offset = skippy_iter.idx - c->buffer->idx + 1;
/* Matches backtrack[0..count) against already-output glyphs, walking
 * backwards from the output cursor with context_match semantics (mask and
 * syllable filtering relaxed via the `true' constructor argument). */
440 static inline bool match_backtrack (hb_apply_context_t *c,
442 const USHORT backtrack[],
443 match_func_t match_func,
444 const void *match_data)
446 hb_apply_context_t::mark_skipping_backward_iterator_t skippy_iter (c, c->buffer->backtrack_len (), count, true);
447 if (skippy_iter.has_no_chance ())
450 for (unsigned int i = 0; i < count; i++)
452 if (!skippy_iter.prev ())
455 if (likely (!match_func (c->buffer->out_info[skippy_iter.idx].codepoint, backtrack[i], match_data)))
/* Matches lookahead[0..count) against glyphs after the input span; `offset'
 * (parameter line missing from this extract) positions the iterator just
 * past the matched input sequence. */
462 static inline bool match_lookahead (hb_apply_context_t *c,
464 const USHORT lookahead[],
465 match_func_t match_func,
466 const void *match_data,
469 hb_apply_context_t::mark_skipping_forward_iterator_t skippy_iter (c, c->buffer->idx + offset - 1, count, true);
470 if (skippy_iter.has_no_chance ())
473 for (unsigned int i = 0; i < count; i++)
475 if (!skippy_iter.next ())
478 if (likely (!match_func (c->buffer->info[skippy_iter.idx].codepoint, lookahead[i], match_data)))
/* LookupRecord: (sequence position, lookup index) pair stored in contextual
 * subtables.  NOTE(review): the `struct LookupRecord {' header line is
 * missing from this extract. */
489 inline bool sanitize (hb_sanitize_context_t *c) {
491 return TRACE_RETURN (c->check_struct (this));
494 USHORT sequenceIndex; /* Index into current glyph
495 * sequence--first glyph = 0 */
496 USHORT lookupListIndex; /* Lookup to apply to that
497 * position--zero--based */
/* Fixed size: two USHORTs. */
499 DEFINE_SIZE_STATIC (4);
503 static inline void closure_lookup (hb_closure_context_t *c,
504 unsigned int lookupCount,
505 const LookupRecord lookupRecord[], /* Array of LookupRecords--in design order */
506 closure_lookup_func_t closure_func)
508 for (unsigned int i = 0; i < lookupCount; i++)
509 closure_func (c, lookupRecord->lookupListIndex);
/* Applies the nested lookups of a contextual rule across the matched span
 * (count glyphs starting at the cursor).  Walks position i through the span,
 * firing apply_func when i reaches the current record's sequenceIndex.
 * NOTE(review): extract is gapped -- the lines that advance lookupRecord/
 * lookupCount after a record fires are among the missing lines. */
512 static inline bool apply_lookup (hb_apply_context_t *c,
513 unsigned int count, /* Including the first glyph */
514 unsigned int lookupCount,
515 const LookupRecord lookupRecord[], /* Array of LookupRecords--in design order */
516 apply_lookup_func_t apply_func)
518 unsigned int end = c->buffer->len;
519 if (unlikely (count == 0 || c->buffer->idx + count > end))
522 /* TODO We don't support lookupRecord arrays that are not increasing:
523 * Should be easy for in_place ones at least. */
525 /* Note: If sublookup is reverse, it will underflow after the first loop
526 * and we jump out of it. Not entirely disastrous. So we don't check
527 * for reverse lookup here.
529 for (unsigned int i = 0; i < count; /* NOP */)
531 if (unlikely (c->buffer->idx == end))
533 while (c->should_mark_skip_current_glyph ())
535 /* No lookup applied for this index */
536 c->buffer->next_glyph ();
537 if (unlikely (c->buffer->idx == end))
541 if (lookupCount && i == lookupRecord->sequenceIndex)
543 unsigned int old_pos = c->buffer->idx;
/* Recurse into the sublookup; the cursor may move arbitrarily. */
546 bool done = apply_func (c, lookupRecord->lookupListIndex);
550 /* Err, this is wrong if the lookup jumped over some glyphs */
551 i += c->buffer->idx - old_pos;
552 if (unlikely (c->buffer->idx == end))
561 /* No lookup applied for this index */
562 c->buffer->next_glyph ();
572 /* Contextual lookups */
/* Bundles closure callbacks with the per-format data they interpret
 * (NULL / ClassDef / table base, depending on format). */
574 struct ContextClosureLookupContext
576 ContextClosureFuncs funcs;
577 const void *intersects_data;
/* Bundles apply/match callbacks with their per-format match data. */
580 struct ContextApplyLookupContext
582 ContextApplyFuncs funcs;
583 const void *match_data;
/* Closure for one context rule: if every input position can intersect the
 * current glyph set, recurse into the rule's LookupRecords. */
586 static inline void context_closure_lookup (hb_closure_context_t *c,
587 unsigned int inputCount, /* Including the first glyph (not matched) */
588 const USHORT input[], /* Array of input values--start with second glyph */
589 unsigned int lookupCount,
590 const LookupRecord lookupRecord[],
591 ContextClosureLookupContext &lookup_context)
593 if (intersects_array (c,
594 inputCount ? inputCount - 1 : 0, input,
595 lookup_context.funcs.intersects, lookup_context.intersects_data))
597 lookupCount, lookupRecord,
598 lookup_context.funcs.closure);
/* would_apply for one context rule: positional input match only (lookahead/
 * backtrack do not exist in plain Context rules). */
602 static inline bool context_would_apply_lookup (hb_would_apply_context_t *c,
603 unsigned int inputCount, /* Including the first glyph (not matched) */
604 const USHORT input[], /* Array of input values--start with second glyph */
605 unsigned int lookupCount,
606 const LookupRecord lookupRecord[],
607 ContextApplyLookupContext &lookup_context)
609 return would_match_input (c,
611 lookup_context.funcs.match, lookup_context.match_data);
/* apply for one context rule: match the input sequence in the buffer, then
 * run the rule's nested lookups via apply_lookup. */
613 static inline bool context_apply_lookup (hb_apply_context_t *c,
614 unsigned int inputCount, /* Including the first glyph (not matched) */
615 const USHORT input[], /* Array of input values--start with second glyph */
616 unsigned int lookupCount,
617 const LookupRecord lookupRecord[],
618 ContextApplyLookupContext &lookup_context)
620 return match_input (c,
622 lookup_context.funcs.match, lookup_context.match_data)
625 lookupCount, lookupRecord,
626 lookup_context.funcs.apply);
/* Rule: one contextual rule -- an input sequence followed inline by its
 * LookupRecords.  NOTE(review): the `struct Rule {' header line is missing
 * from this extract. */
631 friend struct RuleSet;
/* Closure: locate the trailing LookupRecord array (it follows the variable-
 * length input[] array) and delegate to context_closure_lookup. */
635 inline void closure (hb_closure_context_t *c, ContextClosureLookupContext &lookup_context) const
638 const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (input, input[0].static_size * (inputCount ? inputCount - 1 : 0));
639 context_closure_lookup (c,
641 lookupCount, lookupRecord,
/* would_apply: positional test against the query glyph sequence. */
645 inline bool would_apply (hb_would_apply_context_t *c, ContextApplyLookupContext &lookup_context) const
647 TRACE_WOULD_APPLY ();
648 const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (input, input[0].static_size * (inputCount ? inputCount - 1 : 0));
649 return TRACE_RETURN (context_would_apply_lookup (c, inputCount, input, lookupCount, lookupRecord, lookup_context));
/* apply: match against the buffer and run nested lookups. */
652 inline bool apply (hb_apply_context_t *c, ContextApplyLookupContext &lookup_context) const
655 const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (input, input[0].static_size * (inputCount ? inputCount - 1 : 0));
656 return TRACE_RETURN (context_apply_lookup (c, inputCount, input, lookupCount, lookupRecord, lookup_context));
/* sanitize: bounds-check the two variable-length trailing arrays. */
660 inline bool sanitize (hb_sanitize_context_t *c) {
662 return inputCount.sanitize (c)
663 && lookupCount.sanitize (c)
664 && c->check_range (input,
665 input[0].static_size * inputCount
666 + lookupRecordX[0].static_size * lookupCount);
670 USHORT inputCount; /* Total number of glyphs in input
671 * glyph sequence--includes the first
673 USHORT lookupCount; /* Number of LookupRecords */
674 USHORT input[VAR]; /* Array of match inputs--start with
676 LookupRecord lookupRecordX[VAR]; /* Array of LookupRecords--in
679 DEFINE_SIZE_ARRAY2 (4, input, lookupRecordX);
/* RuleSet: all Rules sharing a first-glyph coverage slot; apply/would_apply
 * return on the first rule that matches.  NOTE(review): the `struct RuleSet
 * {' header line is missing from this extract. */
684 inline void closure (hb_closure_context_t *c, ContextClosureLookupContext &lookup_context) const
687 unsigned int num_rules = rule.len;
688 for (unsigned int i = 0; i < num_rules; i++)
689 (this+rule[i]).closure (c, lookup_context);
/* First matching rule wins. */
692 inline bool would_apply (hb_would_apply_context_t *c, ContextApplyLookupContext &lookup_context) const
694 TRACE_WOULD_APPLY ();
695 unsigned int num_rules = rule.len;
696 for (unsigned int i = 0; i < num_rules; i++)
698 if ((this+rule[i]).would_apply (c, lookup_context))
699 return TRACE_RETURN (true);
701 return TRACE_RETURN (false);
/* First applicable rule wins. */
704 inline bool apply (hb_apply_context_t *c, ContextApplyLookupContext &lookup_context) const
707 unsigned int num_rules = rule.len;
708 for (unsigned int i = 0; i < num_rules; i++)
710 if ((this+rule[i]).apply (c, lookup_context))
711 return TRACE_RETURN (true);
713 return TRACE_RETURN (false);
716 inline bool sanitize (hb_sanitize_context_t *c) {
718 return TRACE_RETURN (rule.sanitize (c, this));
723 rule; /* Array of Rule tables
724 * ordered by preference */
726 DEFINE_SIZE_ARRAY (2, rule);
/* Context Substitution/Positioning Format 1: rules keyed by exact glyph ids.
 * Coverage selects the RuleSet for the first glyph. */
730 struct ContextFormat1
732 friend struct Context;
/* Closure: visit every RuleSet whose coverage slot intersects the glyph set. */
736 inline void closure (hb_closure_context_t *c, closure_lookup_func_t closure_func) const
740 const Coverage &cov = (this+coverage);
742 struct ContextClosureLookupContext lookup_context = {
743 {intersects_glyph, closure_func},
747 unsigned int count = ruleSet.len;
748 for (unsigned int i = 0; i < count; i++)
749 if (cov.intersects_coverage (c->glyphs, i)) {
750 const RuleSet &rule_set = this+ruleSet[i];
751 rule_set.closure (c, lookup_context);
755 inline bool would_apply (hb_would_apply_context_t *c) const
757 TRACE_WOULD_APPLY ();
759 const RuleSet &rule_set = this+ruleSet[(this+coverage) (c->glyphs[0])];
760 struct ContextApplyLookupContext lookup_context = {
764 return TRACE_RETURN (rule_set.would_apply (c, lookup_context));
/* apply: coverage lookup on the current glyph picks the RuleSet. */
767 inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
770 unsigned int index = (this+coverage) (c->buffer->cur().codepoint);
771 if (likely (index == NOT_COVERED))
772 return TRACE_RETURN (false);
774 const RuleSet &rule_set = this+ruleSet[index];
775 struct ContextApplyLookupContext lookup_context = {
776 {match_glyph, apply_func},
779 return TRACE_RETURN (rule_set.apply (c, lookup_context));
782 inline bool sanitize (hb_sanitize_context_t *c) {
784 return TRACE_RETURN (coverage.sanitize (c, this) && ruleSet.sanitize (c, this));
788 USHORT format; /* Format identifier--format = 1 */
790 coverage; /* Offset to Coverage table--from
791 * beginning of table */
792 OffsetArrayOf<RuleSet>
793 ruleSet; /* Array of RuleSet tables
794 * ordered by Coverage Index */
796 DEFINE_SIZE_ARRAY (6, ruleSet);
/* Context Format 2: rules keyed by glyph classes.  Coverage gates entry;
 * the ClassDef maps the first glyph to a class, which indexes ruleSet. */
800 struct ContextFormat2
802 friend struct Context;
/* Closure: skip entirely unless the coverage intersects; then visit every
 * RuleSet whose class can intersect the glyph set. */
806 inline void closure (hb_closure_context_t *c, closure_lookup_func_t closure_func) const
809 if (!(this+coverage).intersects (c->glyphs))
812 const ClassDef &class_def = this+classDef;
814 struct ContextClosureLookupContext lookup_context = {
815 {intersects_class, closure_func},
819 unsigned int count = ruleSet.len;
820 for (unsigned int i = 0; i < count; i++)
821 if (class_def.intersects_class (c->glyphs, i)) {
822 const RuleSet &rule_set = this+ruleSet[i];
823 rule_set.closure (c, lookup_context);
827 inline bool would_apply (hb_would_apply_context_t *c) const
829 TRACE_WOULD_APPLY ();
831 const ClassDef &class_def = this+classDef;
832 unsigned int index = class_def (c->glyphs[0]);
833 const RuleSet &rule_set = this+ruleSet[index];
834 struct ContextApplyLookupContext lookup_context = {
838 return TRACE_RETURN (rule_set.would_apply (c, lookup_context));
/* apply: coverage gates; the current glyph's class selects the RuleSet. */
841 inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
844 unsigned int index = (this+coverage) (c->buffer->cur().codepoint);
845 if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);
847 const ClassDef &class_def = this+classDef;
848 index = class_def (c->buffer->cur().codepoint);
849 const RuleSet &rule_set = this+ruleSet[index];
850 struct ContextApplyLookupContext lookup_context = {
851 {match_class, apply_func},
854 return TRACE_RETURN (rule_set.apply (c, lookup_context));
857 inline bool sanitize (hb_sanitize_context_t *c) {
859 return TRACE_RETURN (coverage.sanitize (c, this) && classDef.sanitize (c, this) && ruleSet.sanitize (c, this));
863 USHORT format; /* Format identifier--format = 2 */
865 coverage; /* Offset to Coverage table--from
866 * beginning of table */
868 classDef; /* Offset to glyph ClassDef table--from
869 * beginning of table */
870 OffsetArrayOf<RuleSet>
871 ruleSet; /* Array of RuleSet tables
872 * ordered by class */
874 DEFINE_SIZE_ARRAY (8, ruleSet);
/* Context Format 3: a single rule expressed as one Coverage table per input
 * position; LookupRecords follow the coverage-offset array inline. */
878 struct ContextFormat3
880 friend struct Context;
/* Closure: first coverage gates; remaining coverages are matched via
 * intersects_coverage with `this' as the offset base. */
884 inline void closure (hb_closure_context_t *c, closure_lookup_func_t closure_func) const
887 if (!(this+coverage[0]).intersects (c->glyphs))
890 const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverage, coverage[0].static_size * glyphCount);
891 struct ContextClosureLookupContext lookup_context = {
892 {intersects_coverage, closure_func},
895 context_closure_lookup (c,
896 glyphCount, (const USHORT *) (coverage + 1),
897 lookupCount, lookupRecord,
901 inline bool would_apply (hb_would_apply_context_t *c) const
903 TRACE_WOULD_APPLY ();
905 const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverage, coverage[0].static_size * glyphCount);
906 struct ContextApplyLookupContext lookup_context = {
907 {match_coverage, NULL},
910 return TRACE_RETURN (context_would_apply_lookup (c, glyphCount, (const USHORT *) (coverage + 1), lookupCount, lookupRecord, lookup_context));
/* apply: first coverage tests the current glyph; the rest are matched
 * positionally via match_coverage. */
913 inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
916 unsigned int index = (this+coverage[0]) (c->buffer->cur().codepoint);
917 if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);
919 const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverage, coverage[0].static_size * glyphCount);
920 struct ContextApplyLookupContext lookup_context = {
921 {match_coverage, apply_func},
924 return TRACE_RETURN (context_apply_lookup (c, glyphCount, (const USHORT *) (coverage + 1), lookupCount, lookupRecord, lookup_context));
/* sanitize: check the coverage-offset array, each Coverage table, then the
 * trailing LookupRecord array. */
927 inline bool sanitize (hb_sanitize_context_t *c) {
929 if (!c->check_struct (this)) return TRACE_RETURN (false);
930 unsigned int count = glyphCount;
931 if (!c->check_array (coverage, coverage[0].static_size, count)) return TRACE_RETURN (false);
932 for (unsigned int i = 0; i < count; i++)
933 if (!coverage[i].sanitize (c, this)) return TRACE_RETURN (false);
934 LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverage, coverage[0].static_size * count);
935 return TRACE_RETURN (c->check_array (lookupRecord, lookupRecord[0].static_size, lookupCount));
939 USHORT format; /* Format identifier--format = 3 */
940 USHORT glyphCount; /* Number of glyphs in the input glyph
942 USHORT lookupCount; /* Number of LookupRecords */
944 coverage[VAR]; /* Array of offsets to Coverage
945 * table in glyph sequence order */
946 LookupRecord lookupRecordX[VAR]; /* Array of LookupRecords--in
949 DEFINE_SIZE_ARRAY2 (6, coverage, lookupRecordX);
/* Context: format-dispatching wrapper over the three Context subtable
 * formats.  NOTE(review): the `struct Context {' header line and the union
 * tail/closing lines are missing from this extract. */
956 inline void closure (hb_closure_context_t *c, closure_lookup_func_t closure_func) const
960 case 1: u.format1.closure (c, closure_func); break;
961 case 2: u.format2.closure (c, closure_func); break;
962 case 3: u.format3.closure (c, closure_func); break;
/* Coverage accessor; format 3 exposes its first (entry) coverage. */
967 inline const Coverage &get_coverage (void) const
970 case 1: return this + u.format1.coverage;
971 case 2: return this + u.format2.coverage;
972 case 3: return this + u.format3.coverage[0];
973 default:return Null(Coverage);
977 inline bool would_apply (hb_would_apply_context_t *c) const
980 case 1: return u.format1.would_apply (c);
981 case 2: return u.format2.would_apply (c);
982 case 3: return u.format3.would_apply (c);
983 default:return false;
987 inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
991 case 1: return TRACE_RETURN (u.format1.apply (c, apply_func));
992 case 2: return TRACE_RETURN (u.format2.apply (c, apply_func));
993 case 3: return TRACE_RETURN (u.format3.apply (c, apply_func));
994 default:return TRACE_RETURN (false);
/* sanitize: validate the format tag first, then the selected variant. */
998 inline bool sanitize (hb_sanitize_context_t *c) {
1000 if (!u.format.sanitize (c)) return TRACE_RETURN (false);
1002 case 1: return TRACE_RETURN (u.format1.sanitize (c));
1003 case 2: return TRACE_RETURN (u.format2.sanitize (c));
1004 case 3: return TRACE_RETURN (u.format3.sanitize (c));
1005 default:return TRACE_RETURN (true);
1011 USHORT format; /* Format identifier */
1012 ContextFormat1 format1;
1013 ContextFormat2 format2;
1014 ContextFormat3 format3;
1019 /* Chaining Contextual lookups */
/* Chain variant: three data slots, one each for backtrack/input/lookahead. */
1021 struct ChainContextClosureLookupContext
1023 ContextClosureFuncs funcs;
1024 const void *intersects_data[3];
/* Chain variant: per-sequence match data (backtrack/input/lookahead). */
1027 struct ChainContextApplyLookupContext
1029 ContextApplyFuncs funcs;
1030 const void *match_data[3];
/* Closure for one chain rule: all three sequences (backtrack, input minus
 * the first glyph, lookahead) must be able to intersect the glyph set before
 * recursing into the LookupRecords. */
1033 static inline void chain_context_closure_lookup (hb_closure_context_t *c,
1034 unsigned int backtrackCount,
1035 const USHORT backtrack[],
1036 unsigned int inputCount, /* Including the first glyph (not matched) */
1037 const USHORT input[], /* Array of input values--start with second glyph */
1038 unsigned int lookaheadCount,
1039 const USHORT lookahead[],
1040 unsigned int lookupCount,
1041 const LookupRecord lookupRecord[],
1042 ChainContextClosureLookupContext &lookup_context)
1044 if (intersects_array (c,
1045 backtrackCount, backtrack,
1046 lookup_context.funcs.intersects, lookup_context.intersects_data[0])
1047 && intersects_array (c,
1048 inputCount ? inputCount - 1 : 0, input,
1049 lookup_context.funcs.intersects, lookup_context.intersects_data[1])
1050 && intersects_array (c,
1051 lookaheadCount, lookahead,
1052 lookup_context.funcs.intersects, lookup_context.intersects_data[2]))
1054 lookupCount, lookupRecord,
1055 lookup_context.funcs.closure);
/* would_apply for one chain rule: only rules with empty backtrack (and, per
 * the visible `!backtrackCount' condition plus a missing line presumably
 * covering lookahead -- TODO confirm) can match a bare glyph sequence. */
1058 static inline bool chain_context_would_apply_lookup (hb_would_apply_context_t *c,
1059 unsigned int backtrackCount,
1060 const USHORT backtrack[],
1061 unsigned int inputCount, /* Including the first glyph (not matched) */
1062 const USHORT input[], /* Array of input values--start with second glyph */
1063 unsigned int lookaheadCount,
1064 const USHORT lookahead[],
1065 unsigned int lookupCount,
1066 const LookupRecord lookupRecord[],
1067 ChainContextApplyLookupContext &lookup_context)
1069 return !backtrackCount
1071 && would_match_input (c,
1073 lookup_context.funcs.match, lookup_context.match_data[1]);
/* apply for one chain rule: match input (recording its end offset), then
 * backtrack, then lookahead starting after the input span; on full match,
 * run the nested lookups. */
1076 static inline bool chain_context_apply_lookup (hb_apply_context_t *c,
1077 unsigned int backtrackCount,
1078 const USHORT backtrack[],
1079 unsigned int inputCount, /* Including the first glyph (not matched) */
1080 const USHORT input[], /* Array of input values--start with second glyph */
1081 unsigned int lookaheadCount,
1082 const USHORT lookahead[],
1083 unsigned int lookupCount,
1084 const LookupRecord lookupRecord[],
1085 ChainContextApplyLookupContext &lookup_context)
1087 unsigned int lookahead_offset;
1088 return match_input (c,
1090 lookup_context.funcs.match, lookup_context.match_data[1],
1092 && match_backtrack (c,
1093 backtrackCount, backtrack,
1094 lookup_context.funcs.match, lookup_context.match_data[0])
1095 && match_lookahead (c,
1096 lookaheadCount, lookahead,
1097 lookup_context.funcs.match, lookup_context.match_data[2],
1101 lookupCount, lookupRecord,
1102 lookup_context.funcs.apply);
/* ChainRule: backtrack, input, lookahead and LookupRecord arrays laid out
 * back-to-back; each is located with StructAfter on the previous one.
 * NOTE(review): the `struct ChainRule {' header line is missing here. */
1107 friend struct ChainRuleSet;
1111 inline void closure (hb_closure_context_t *c, ChainContextClosureLookupContext &lookup_context) const
1114 const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
1115 const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
1116 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
1117 chain_context_closure_lookup (c,
1118 backtrack.len, backtrack.array,
1119 input.len, input.array,
1120 lookahead.len, lookahead.array,
1121 lookup.len, lookup.array,
1125 inline bool would_apply (hb_would_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
1127 TRACE_WOULD_APPLY ();
1128 const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
1129 const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
1130 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
1131 return TRACE_RETURN (chain_context_would_apply_lookup (c,
1132 backtrack.len, backtrack.array,
1133 input.len, input.array,
1134 lookahead.len, lookahead.array, lookup.len,
1135 lookup.array, lookup_context));
1138 inline bool apply (hb_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
1141 const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
1142 const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
1143 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
1144 return TRACE_RETURN (chain_context_apply_lookup (c,
1145 backtrack.len, backtrack.array,
1146 input.len, input.array,
1147 lookahead.len, lookahead.array, lookup.len,
1148 lookup.array, lookup_context));
/* sanitize: each array must validate before the next one can be located. */
1152 inline bool sanitize (hb_sanitize_context_t *c) {
1154 if (!backtrack.sanitize (c)) return TRACE_RETURN (false);
1155 HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
1156 if (!input.sanitize (c)) return TRACE_RETURN (false);
1157 ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
1158 if (!lookahead.sanitize (c)) return TRACE_RETURN (false);
1159 ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
1160 return TRACE_RETURN (lookup.sanitize (c));
1165 backtrack; /* Array of backtracking values
1166 * (to be matched before the input
1168 HeadlessArrayOf<USHORT>
1169 inputX; /* Array of input values (start with
1172 lookaheadX; /* Array of lookahead values's (to be
1173 * matched after the input sequence) */
1174 ArrayOf<LookupRecord>
1175 lookupX; /* Array of LookupRecords--in
1178 DEFINE_SIZE_MIN (8);
1183 inline void closure (hb_closure_context_t *c, ChainContextClosureLookupContext &lookup_context) const
1186 unsigned int num_rules = rule.len;
1187 for (unsigned int i = 0; i < num_rules; i++)
1188 (this+rule[i]).closure (c, lookup_context);
/* ChainRuleSet::would_apply: true iff at least one rule in this set would
 * match the queried glyph sequence.  Rules are tried in table order;
 * first hit wins. */
1191 inline bool would_apply (hb_would_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
1193 TRACE_WOULD_APPLY ();
1194 unsigned int num_rules = rule.len;
1195 for (unsigned int i = 0; i < num_rules; i++)
1196 if ((this+rule[i]).would_apply (c, lookup_context))
1197 return TRACE_RETURN (true);
1199 return TRACE_RETURN (false);
/* ChainRuleSet::apply: try each rule in preference (table) order and apply
 * the first one that matches at the current buffer position.  Returns false
 * if no rule applied. */
1202 inline bool apply (hb_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
1205 unsigned int num_rules = rule.len;
1206 for (unsigned int i = 0; i < num_rules; i++)
1207 if ((this+rule[i]).apply (c, lookup_context))
1208 return TRACE_RETURN (true);
1210 return TRACE_RETURN (false);
/* ChainRuleSet::sanitize: validate the offset array and, transitively, each
 * referenced ChainRule (OffsetArrayOf sanitizes its targets relative to
 * `this`). */
1213 inline bool sanitize (hb_sanitize_context_t *c) {
1215 return TRACE_RETURN (rule.sanitize (c, this));
/* Members: offsets (from beginning of this table) to the ChainRule tables,
 * in preference order. */
1219 OffsetArrayOf<ChainRule>
1220 rule; /* Array of ChainRule tables
1221 * ordered by preference */
1223 DEFINE_SIZE_ARRAY (2, rule);
/* ChainContextFormat1: chaining contextual lookup, format 1 — rules are
 * keyed by individual first-glyph via a Coverage table; each covered glyph
 * maps to a ChainRuleSet whose rules match literal glyph ids.
 * NOTE(review): interior lines (braces, trace macros, privacy sections) are
 * missing from this excerpt. */
1226 struct ChainContextFormat1
1228 friend struct ChainContext;
/* closure: for every glyph class the closure glyph set intersects in the
 * coverage, recurse into the corresponding rule set.  Matching here is by
 * glyph id (intersects_glyph / match_glyph funcs). */
1232 inline void closure (hb_closure_context_t *c, closure_lookup_func_t closure_func) const
1235 const Coverage &cov = (this+coverage);
1237 struct ChainContextClosureLookupContext lookup_context = {
1238 {intersects_glyph, closure_func},
1242 unsigned int count = ruleSet.len;
1243 for (unsigned int i = 0; i < count; i++)
1244 if (cov.intersects_coverage (c->glyphs, i)) {
1245 const ChainRuleSet &rule_set = this+ruleSet[i];
1246 rule_set.closure (c, lookup_context);
/* would_apply: indexes ruleSet directly by the coverage value of the first
 * queried glyph — NOTE(review): no visible NOT_COVERED guard here; presumably
 * the caller guarantees coverage, or OffsetArrayOf indexing is
 * out-of-bounds-safe — confirm against the full file. */
1250 inline bool would_apply (hb_would_apply_context_t *c) const
1252 TRACE_WOULD_APPLY ();
1254 const ChainRuleSet &rule_set = this+ruleSet[(this+coverage) (c->glyphs[0])];
1255 struct ChainContextApplyLookupContext lookup_context = {
1256 {match_glyph, NULL},
1259 return TRACE_RETURN (rule_set.would_apply (c, lookup_context));
/* apply: bail out early when the current glyph is not in the coverage
 * (the common case); otherwise hand off to the matching rule set. */
1262 inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
1265 unsigned int index = (this+coverage) (c->buffer->cur().codepoint);
1266 if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);
1268 const ChainRuleSet &rule_set = this+ruleSet[index];
1269 struct ChainContextApplyLookupContext lookup_context = {
1270 {match_glyph, apply_func},
1273 return TRACE_RETURN (rule_set.apply (c, lookup_context));
/* sanitize: coverage table plus every rule set (and transitively every
 * rule) must pass. */
1276 inline bool sanitize (hb_sanitize_context_t *c) {
1278 return TRACE_RETURN (coverage.sanitize (c, this) && ruleSet.sanitize (c, this));
1282 USHORT format; /* Format identifier--format = 1 */
1284 coverage; /* Offset to Coverage table--from
1285 * beginning of table */
1286 OffsetArrayOf<ChainRuleSet>
1287 ruleSet; /* Array of ChainRuleSet tables
1288 * ordered by Coverage Index */
1290 DEFINE_SIZE_ARRAY (6, ruleSet);
/* ChainContextFormat2: chaining contextual lookup, format 2 — rules match
 * glyph *classes* rather than glyph ids.  Three independent ClassDef tables
 * classify the backtrack, input and lookahead sequences; ruleSet is indexed
 * by the input class of the first glyph.
 * NOTE(review): interior lines are missing from this excerpt. */
1293 struct ChainContextFormat2
1295 friend struct ChainContext;
/* closure: skip entirely if the closure set misses the coverage; otherwise
 * recurse into every rule set whose input class intersects the set.  Class
 * handles for all three sequences are passed down via lookup_context. */
1299 inline void closure (hb_closure_context_t *c, closure_lookup_func_t closure_func) const
1302 if (!(this+coverage).intersects (c->glyphs))
1305 const ClassDef &backtrack_class_def = this+backtrackClassDef;
1306 const ClassDef &input_class_def = this+inputClassDef;
1307 const ClassDef &lookahead_class_def = this+lookaheadClassDef;
1309 struct ChainContextClosureLookupContext lookup_context = {
1310 {intersects_class, closure_func},
1311 {&backtrack_class_def,
1313 &lookahead_class_def}
1316 unsigned int count = ruleSet.len;
1317 for (unsigned int i = 0; i < count; i++)
1318 if (input_class_def.intersects_class (c->glyphs, i)) {
1319 const ChainRuleSet &rule_set = this+ruleSet[i];
1320 rule_set.closure (c, lookup_context);
/* would_apply: only the input class is needed for a would-apply query;
 * backtrack/lookahead class defs are deliberately NULL in the context. */
1324 inline bool would_apply (hb_would_apply_context_t *c) const
1326 TRACE_WOULD_APPLY ();
1328 const ClassDef &input_class_def = this+inputClassDef;
1330 unsigned int index = input_class_def (c->glyphs[0]);
1331 const ChainRuleSet &rule_set = this+ruleSet[index];
1332 struct ChainContextApplyLookupContext lookup_context = {
1333 {match_class, NULL},
1334 {NULL, &input_class_def, NULL}
1336 return TRACE_RETURN (rule_set.would_apply (c, lookup_context));
/* apply: first gate on coverage (cheap reject for the common case), then
 * re-use `index` for the input class of the current glyph to pick the
 * rule set. */
1339 inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
1342 unsigned int index = (this+coverage) (c->buffer->cur().codepoint);
1343 if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);
1345 const ClassDef &backtrack_class_def = this+backtrackClassDef;
1346 const ClassDef &input_class_def = this+inputClassDef;
1347 const ClassDef &lookahead_class_def = this+lookaheadClassDef;
1349 index = input_class_def (c->buffer->cur().codepoint);
1350 const ChainRuleSet &rule_set = this+ruleSet[index];
1351 struct ChainContextApplyLookupContext lookup_context = {
1352 {match_class, apply_func},
1353 {&backtrack_class_def,
1355 &lookahead_class_def}
1357 return TRACE_RETURN (rule_set.apply (c, lookup_context));
/* sanitize: all five offset members must reference valid subtables. */
1360 inline bool sanitize (hb_sanitize_context_t *c) {
1362 return TRACE_RETURN (coverage.sanitize (c, this) && backtrackClassDef.sanitize (c, this) &&
1363 inputClassDef.sanitize (c, this) && lookaheadClassDef.sanitize (c, this) &&
1364 ruleSet.sanitize (c, this));
1368 USHORT format; /* Format identifier--format = 2 */
1370 coverage; /* Offset to Coverage table--from
1371 * beginning of table */
1373 backtrackClassDef; /* Offset to glyph ClassDef table
1374 * containing backtrack sequence
1375 * data--from beginning of table */
1377 inputClassDef; /* Offset to glyph ClassDef
1378 * table containing input sequence
1379 * data--from beginning of table */
1381 lookaheadClassDef; /* Offset to glyph ClassDef table
1382 * containing lookahead sequence
1383 * data--from beginning of table */
1384 OffsetArrayOf<ChainRuleSet>
1385 ruleSet; /* Array of ChainRuleSet tables
1386 * ordered by class */
1388 DEFINE_SIZE_ARRAY (12, ruleSet);
/* ChainContextFormat3: chaining contextual lookup, format 3 — a single rule
 * where every position in the backtrack/input/lookahead sequences is its own
 * Coverage table.  The four arrays are stored back-to-back after `backtrack`
 * and are walked with StructAfter (valid only post-sanitize).  The coverage
 * offsets are passed to the shared lookup helpers as `const USHORT *`, and
 * `input.array + 1` skips the first input coverage because position 0 is
 * handled by the caller's coverage check.
 * NOTE(review): interior lines are missing from this excerpt. */
1391 struct ChainContextFormat3
1393 friend struct ChainContext;
/* closure: nothing to do unless the closure set intersects the coverage of
 * the first input position; otherwise feed all four arrays to the generic
 * closure helper with coverage-based matching. */
1397 inline void closure (hb_closure_context_t *c, closure_lookup_func_t closure_func) const
1400 const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
1402 if (!(this+input[0]).intersects (c->glyphs))
1405 const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
1406 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
1407 struct ChainContextClosureLookupContext lookup_context = {
1408 {intersects_coverage, closure_func},
1411 chain_context_closure_lookup (c,
1412 backtrack.len, (const USHORT *) backtrack.array,
1413 input.len, (const USHORT *) input.array + 1,
1414 lookahead.len, (const USHORT *) lookahead.array,
1415 lookup.len, lookup.array,
/* get_coverage: format 3 has no single coverage member; the coverage of the
 * first input position serves that role. */
1419 inline const Coverage &get_coverage (void) const
1421 const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
1422 return this+input[0];
/* would_apply: match the queried glyph sequence against the per-position
 * coverages; no lookups are applied (apply func is NULL). */
1425 inline bool would_apply (hb_would_apply_context_t *c) const
1427 TRACE_WOULD_APPLY ();
1429 const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
1430 const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
1431 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
1432 struct ChainContextApplyLookupContext lookup_context = {
1433 {match_coverage, NULL},
1436 return TRACE_RETURN (chain_context_would_apply_lookup (c,
1437 backtrack.len, (const USHORT *) backtrack.array,
1438 input.len, (const USHORT *) input.array + 1,
1439 lookahead.len, (const USHORT *) lookahead.array,
1440 lookup.len, lookup.array, lookup_context));
/* apply: reject early when the current glyph misses the first input
 * coverage, then run the full chained match + lookup application. */
1443 inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
1446 const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
1448 unsigned int index = (this+input[0]) (c->buffer->cur().codepoint);
1449 if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);
1451 const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
1452 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
1453 struct ChainContextApplyLookupContext lookup_context = {
1454 {match_coverage, apply_func},
1457 return TRACE_RETURN (chain_context_apply_lookup (c,
1458 backtrack.len, (const USHORT *) backtrack.array,
1459 input.len, (const USHORT *) input.array + 1,
1460 lookahead.len, (const USHORT *) lookahead.array,
1461 lookup.len, lookup.array, lookup_context));
/* sanitize: validate each consecutive array before stepping past it, same
 * ordering discipline as ChainRule::sanitize.  Coverage offsets are
 * resolved relative to `this`. */
1464 inline bool sanitize (hb_sanitize_context_t *c) {
1466 if (!backtrack.sanitize (c, this)) return TRACE_RETURN (false);
1467 OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
1468 if (!input.sanitize (c, this)) return TRACE_RETURN (false);
1469 OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
1470 if (!lookahead.sanitize (c, this)) return TRACE_RETURN (false);
1471 ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
1472 return TRACE_RETURN (lookup.sanitize (c));
1476 USHORT format; /* Format identifier--format = 3 */
1477 OffsetArrayOf<Coverage>
1478 backtrack; /* Array of coverage tables
1479 * in backtracking sequence, in glyph
1481 OffsetArrayOf<Coverage>
1482 inputX ; /* Array of coverage
1483 * tables in input sequence, in glyph
1485 OffsetArrayOf<Coverage>
1486 lookaheadX; /* Array of coverage tables
1487 * in lookahead sequence, in glyph
1489 ArrayOf<LookupRecord>
1490 lookupX; /* Array of LookupRecords--in
1493 DEFINE_SIZE_MIN (10);
/* ChainContext::closure: dispatch on the subtable format tag to the matching
 * format struct; unknown formats are silently ignored (closure is
 * best-effort). */
1500 inline void closure (hb_closure_context_t *c, closure_lookup_func_t closure_func) const
1504 case 1: u.format1.closure (c, closure_func); break;
1505 case 2: u.format2.closure (c, closure_func); break;
1506 case 3: u.format3.closure (c, closure_func); break;
/* ChainContext::get_coverage: return the primary coverage of the active
 * format; the Null() sentinel object stands in for unknown formats so
 * callers never get a dangling reference. */
1511 inline const Coverage &get_coverage (void) const
1514 case 1: return this + u.format1.coverage;
1515 case 2: return this + u.format2.coverage;
1516 case 3: return u.format3.get_coverage ();
1517 default:return Null(Coverage);
/* ChainContext::would_apply: format dispatch; unknown formats can never
 * apply. */
1521 inline bool would_apply (hb_would_apply_context_t *c) const
1524 case 1: return u.format1.would_apply (c);
1525 case 2: return u.format2.would_apply (c);
1526 case 3: return u.format3.would_apply (c);
1527 default:return false;
/* ChainContext::apply: format dispatch for actual lookup application;
 * unknown formats apply nothing and report false. */
1531 inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
1535 case 1: return TRACE_RETURN (u.format1.apply (c, apply_func));
1536 case 2: return TRACE_RETURN (u.format2.apply (c, apply_func));
1537 case 3: return TRACE_RETURN (u.format3.apply (c, apply_func));
1538 default:return TRACE_RETURN (false);
/* ChainContext::sanitize: first validate the format tag itself (it selects
 * the union arm), then the matching subtable.  Unknown formats are accepted
 * as valid-but-inert rather than rejected, so newer fonts don't fail
 * sanitization wholesale. */
1542 inline bool sanitize (hb_sanitize_context_t *c) {
1544 if (!u.format.sanitize (c)) return TRACE_RETURN (false);
1546 case 1: return TRACE_RETURN (u.format1.sanitize (c));
1547 case 2: return TRACE_RETURN (u.format2.sanitize (c));
1548 case 3: return TRACE_RETURN (u.format3.sanitize (c));
1549 default:return TRACE_RETURN (true);
/* Union of the three wire formats; `format` aliases the leading USHORT of
 * each and selects the active arm. */
1555 USHORT format; /* Format identifier */
1556 ChainContextFormat1 format1;
1557 ChainContextFormat2 format2;
1558 ChainContextFormat3 format3;
/* ExtensionFormat1: the single defined format of the Extension lookup
 * mechanism.  It exists solely to let a 16-bit lookup list reference
 * subtables beyond 64k via a 32-bit offset: it records the real lookup type
 * and a ULONG offset to the actual subtable. */
1563 struct ExtensionFormat1
1565 friend struct Extension;
1568 inline unsigned int get_type (void) const { return extensionLookupType; }
1569 inline unsigned int get_offset (void) const { return extensionOffset; }
/* sanitize: the struct is fixed-size with no internal offsets to chase, so
 * a plain bounds check on the struct itself suffices; the wrapped subtable
 * is sanitized by whoever dereferences extensionOffset. */
1571 inline bool sanitize (hb_sanitize_context_t *c) {
1573 return TRACE_RETURN (c->check_struct (this));
1577 USHORT format; /* Format identifier. Set to 1. */
1578 USHORT extensionLookupType; /* Lookup type of subtable referenced
1579 * by ExtensionOffset (i.e. the
1580 * extension subtable). */
1581 ULONG extensionOffset; /* Offset to the extension subtable,
1582 * of lookup type subtable. */
1584 DEFINE_SIZE_STATIC (8);
/* Extension wrapper (struct header outside this excerpt): format-dispatching
 * accessors over ExtensionFormat1.
 * NOTE(review): the default/fallback return lines of get_type/get_offset are
 * missing from this excerpt — confirm their values against the full file. */
1589 inline unsigned int get_type (void) const
1592 case 1: return u.format1.get_type ();
1596 inline unsigned int get_offset (void) const
1599 case 1: return u.format1.get_offset ();
/* sanitize: check the format tag, then the selected arm; unrecognized
 * formats are treated as valid-but-inert. */
1604 inline bool sanitize (hb_sanitize_context_t *c) {
1606 if (!u.format.sanitize (c)) return TRACE_RETURN (false);
1608 case 1: return TRACE_RETURN (u.format1.sanitize (c));
1609 default:return TRACE_RETURN (true);
1615 USHORT format; /* Format identifier */
1616 ExtensionFormat1 format1;
/* GSUBGPOS (struct header outside this excerpt): shared top-level layout of
 * the GSUB and GPOS tables — a version field plus offsets to the ScriptList,
 * FeatureList and LookupList.  All accessors below are thin forwarding
 * wrappers that resolve the relevant offset and delegate to the target
 * table's own API. */
1627 static const hb_tag_t GSUBTag = HB_OT_TAG_GSUB;
1628 static const hb_tag_t GPOSTag = HB_OT_TAG_GPOS;
/* Script accessors: count/tag/range-of-tags/record lookup, all forwarded to
 * the ScriptList. */
1630 inline unsigned int get_script_count (void) const
1631 { return (this+scriptList).len; }
1632 inline const Tag& get_script_tag (unsigned int i) const
1633 { return (this+scriptList).get_tag (i); }
1634 inline unsigned int get_script_tags (unsigned int start_offset,
1635 unsigned int *script_count /* IN/OUT */,
1636 hb_tag_t *script_tags /* OUT */) const
1637 { return (this+scriptList).get_tags (start_offset, script_count, script_tags); }
1638 inline const Script& get_script (unsigned int i) const
1639 { return (this+scriptList)[i]; }
1640 inline bool find_script_index (hb_tag_t tag, unsigned int *index) const
1641 { return (this+scriptList).find_index (tag, index); }
/* Feature accessors: same shape as the script accessors, forwarded to the
 * FeatureList. */
1643 inline unsigned int get_feature_count (void) const
1644 { return (this+featureList).len; }
1645 inline const Tag& get_feature_tag (unsigned int i) const
1646 { return (this+featureList).get_tag (i); }
1647 inline unsigned int get_feature_tags (unsigned int start_offset,
1648 unsigned int *feature_count /* IN/OUT */,
1649 hb_tag_t *feature_tags /* OUT */) const
1650 { return (this+featureList).get_tags (start_offset, feature_count, feature_tags); }
1651 inline const Feature& get_feature (unsigned int i) const
1652 { return (this+featureList)[i]; }
1653 inline bool find_feature_index (hb_tag_t tag, unsigned int *index) const
1654 { return (this+featureList).find_index (tag, index); }
/* Lookup accessors, forwarded to the LookupList. */
1656 inline unsigned int get_lookup_count (void) const
1657 { return (this+lookupList).len; }
1658 inline const Lookup& get_lookup (unsigned int i) const
1659 { return (this+lookupList)[i]; }
/* sanitize: require a parseable version with major == 1, then validate the
 * three top-level subtables through their offsets. */
1661 inline bool sanitize (hb_sanitize_context_t *c) {
1663 return TRACE_RETURN (version.sanitize (c) && likely (version.major == 1) &&
1664 scriptList.sanitize (c, this) &&
1665 featureList.sanitize (c, this) &&
1666 lookupList.sanitize (c, this));
1670 FixedVersion version; /* Version of the GSUB/GPOS table--initially set
1672 OffsetTo<ScriptList>
1673 scriptList; /* ScriptList table */
1674 OffsetTo<FeatureList>
1675 featureList; /* FeatureList table */
1676 OffsetTo<LookupList>
1677 lookupList; /* LookupList table */
1679 DEFINE_SIZE_STATIC (10);
1684 #endif /* HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH */