2 * Copyright (C) 2007,2008,2009,2010 Red Hat, Inc.
4 * This is part of HarfBuzz, a text shaping library.
6 * Permission is hereby granted, without written agreement and without
7 * license or royalty fees, to use, copy, modify, and distribute this
8 * software and its documentation for any purpose, provided that the
9 * above copyright notice and the following two paragraphs appear in
10 * all copies of this software.
12 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
13 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
14 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
15 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
18 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
19 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
20 * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
21 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
22 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
24 * Red Hat Author(s): Behdad Esfahbod
27 #ifndef HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH
28 #define HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH
30 #include "hb-buffer-private.hh"
31 #include "hb-ot-layout-gdef-private.hh"
34 #ifndef HB_DEBUG_APPLY
35 #define HB_DEBUG_APPLY HB_DEBUG+0
38 #define TRACE_APPLY() \
39 hb_trace_t<HB_DEBUG_APPLY> trace (&c->debug_depth, "APPLY", HB_FUNC, this); \
/* Mutable state threaded through every GSUB/GPOS lookup application below.
 * NOTE(review): this dump is truncated (embedded line numbers skip), so
 * members, constructors, and the closing brace may be missing from view. */
42 struct hb_apply_context_t
44   unsigned int debug_depth;            /* used by TRACE_APPLY() nesting */
45   hb_ot_layout_context_t *layout;      /* face/font context for mark-skipping etc. */
47   hb_mask_t lookup_mask;
48   unsigned int context_length;         /* glyphs remaining that this lookup may consume */
49   unsigned int nesting_level_left;     /* recursion budget for nested lookups -- presumably; confirm */
50   unsigned int lookup_flag;            /* LookupFlag bits (e.g. IgnoreMarks) */
51   unsigned int property; /* property of first glyph (TODO remove) */
/* Callback types shared by all contextual-matching helpers below.
 * `value` is interpreted per matcher: glyph id, class, or coverage offset.
 * NOTE(review): the `apply` member at original line 62 presumably belongs to
 * a funcs struct whose declaration is not visible in this truncated view. */
56 typedef bool (*match_func_t) (hb_codepoint_t glyph_id, const USHORT &value, const void *data);
57 typedef bool (*apply_lookup_func_t) (hb_apply_context_t *c, unsigned int lookup_index);
62   apply_lookup_func_t apply;
/* Matcher for format-1 contexts: `value` is a literal glyph id; `data` unused. */
66 static inline bool match_glyph (hb_codepoint_t glyph_id, const USHORT &value, const void *data HB_UNUSED)
68   return glyph_id == value;
/* Matcher for format-2 contexts: `value` is a glyph class; `data` points at
 * the ClassDef table used to classify `glyph_id`. */
71 static inline bool match_class (hb_codepoint_t glyph_id, const USHORT &value, const void *data)
73   const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
74   return class_def.get_class (glyph_id) == value;
/* Matcher for format-3 contexts: `value` is reinterpreted as an offset to a
 * Coverage table, resolved relative to `data` (the subtable base). */
77 static inline bool match_coverage (hb_codepoint_t glyph_id, const USHORT &value, const void *data)
79   const OffsetTo<Coverage> &coverage = (const OffsetTo<Coverage>&)value;
80   return (data+coverage) (glyph_id) != NOT_COVERED;
/* Matches `count - 1` input values against the glyphs following the current
 * buffer position, skipping ignorable marks per lookup_flag.  On success the
 * matched span length is written to *context_length_out.
 * NOTE(review): interior lines (declarations of i/j, early returns, closing
 * braces) are missing from this truncated view -- do not rely on this text
 * compiling as-is. */
84 static inline bool match_input (hb_apply_context_t *c,
85 unsigned int count, /* Including the first glyph (not matched) */
86 const USHORT input[], /* Array of input values--start with second glyph */
87 match_func_t match_func,
88 const void *match_data,
89 unsigned int *context_length_out)
92   unsigned int end = MIN (c->buffer->len, c->buffer->i + c->context_length);
93   if (unlikely (c->buffer->i + count > end))
96   for (i = 1, j = c->buffer->i + 1; i < count; i++, j++)
98     /* skip glyphs the lookup ignores (marks, per lookup_flag) */
98     while (_hb_ot_layout_skip_mark (c->layout->face, &c->buffer->info[j], c->lookup_flag, NULL))
100       if (unlikely (j + count - i == end))  /* ran out of glyphs while skipping */
105     if (likely (!match_func (c->buffer->info[j].codepoint, input[i - 1], match_data)))
109   *context_length_out = j - c->buffer->i;
/* Matches `count` backtrack values against already-output glyphs, walking
 * backwards from the end of the out-buffer and skipping ignorable marks.
 * NOTE(review): truncated view -- the `count` parameter line and several
 * interior lines are missing. */
114 static inline bool match_backtrack (hb_apply_context_t *c,
116 const USHORT backtrack[],
117 match_func_t match_func,
118 const void *match_data)
120   if (unlikely (c->buffer->out_len < count))
123   for (unsigned int i = 0, j = c->buffer->out_len - 1; i < count; i++, j--)
125     while (_hb_ot_layout_skip_mark (c->layout->face, &c->buffer->out_info[j], c->lookup_flag, NULL))
127       if (unlikely (j + 1 == count - i))  /* not enough glyphs left behind us */
132     if (likely (!match_func (c->buffer->out_info[j].codepoint, backtrack[i], match_data)))
/* Matches `count` lookahead values against glyphs after the input sequence,
 * starting `offset` glyphs past the current position, skipping marks.
 * NOTE(review): truncated view -- the `count` and `offset` parameter lines
 * and interior lines are missing; mirrors match_input's forward walk. */
139 static inline bool match_lookahead (hb_apply_context_t *c,
141 const USHORT lookahead[],
142 match_func_t match_func,
143 const void *match_data,
147   unsigned int end = MIN (c->buffer->len, c->buffer->i + c->context_length);
148   if (unlikely (c->buffer->i + offset + count > end))
151   for (i = 0, j = c->buffer->i + offset; i < count; i++, j++)
153     while (_hb_ot_layout_skip_mark (c->layout->face, &c->buffer->info[j], c->lookup_flag, NULL))
155       if (unlikely (j + count - i == end))
160     if (likely (!match_func (c->buffer->info[j].codepoint, lookahead[i], match_data)))
/* Interior of the LookupRecord struct (header line not visible in this view):
 * pairs a position in the matched sequence with the lookup to apply there. */
170   inline bool sanitize (hb_sanitize_context_t *c) {
172     return c->check_struct (this);
175   USHORT sequenceIndex; /* Index into current glyph
176 * sequence--first glyph = 0 */
177   USHORT lookupListIndex; /* Lookup to apply to that
178 * position--zero-based */
180   DEFINE_SIZE_STATIC (4);
/* Applies the LookupRecords in order across the matched glyph run:
 * advances glyph-by-glyph, and at each position named by the next
 * lookupRecord->sequenceIndex invokes apply_func with its lookupListIndex.
 * NOTE(review): heavily truncated view -- record-advance lines, brace
 * structure, and the final return are missing. */
183 static inline bool apply_lookup (hb_apply_context_t *c,
184 unsigned int count, /* Including the first glyph */
185 unsigned int lookupCount,
186 const LookupRecord lookupRecord[], /* Array of LookupRecords--in design order */
187 apply_lookup_func_t apply_func)
189   unsigned int end = MIN (c->buffer->len, c->buffer->i + c->context_length);
190   if (unlikely (c->buffer->i + count > end))
193   /* TODO We don't support lookupRecord arrays that are not increasing:
194    * Should be easy for in_place ones at least. */
196   /* Note: If sublookup is reverse, i will underflow after the first loop
197    * and we jump out of it. Not entirely disastrous. So we don't check
198    * for reverse lookup here.
200   for (unsigned int i = 0; i < count; /* NOP */)
202     while (_hb_ot_layout_skip_mark (c->layout->face, &c->buffer->info[c->buffer->i], c->lookup_flag, NULL))
204       if (unlikely (c->buffer->i == end))
206       /* No lookup applied for this index */
207       c->buffer->next_glyph ();
210     if (lookupCount && i == lookupRecord->sequenceIndex)
212       unsigned int old_pos = c->buffer->i;
215       bool done = apply_func (c, lookupRecord->lookupListIndex);
219       /* Err, this is wrong if the lookup jumped over some glyphs */
220       i += c->buffer->i - old_pos;  /* account for glyphs consumed by the sublookup */
221       if (unlikely (c->buffer->i == end))
230     /* No lookup applied for this index */
231     c->buffer->next_glyph ();
240 /* Contextual lookups */
/* Bundles the matcher/apply callbacks with matcher-specific data for plain
 * (non-chaining) contextual lookups.  NOTE(review): the `funcs` member line
 * is missing from this truncated view. */
242 struct ContextLookupContext
245   const void *match_data;
/* Drives one contextual rule: match the input sequence against the buffer,
 * then apply the rule's LookupRecords via a copied context whose
 * context_length was clamped to the matched span by match_input.
 * NOTE(review): truncated -- argument lines for the match_input and
 * apply_lookup calls are missing. */
248 static inline bool context_lookup (hb_apply_context_t *c,
249 unsigned int inputCount, /* Including the first glyph (not matched) */
250 const USHORT input[], /* Array of input values--start with second glyph */
251 unsigned int lookupCount,
252 const LookupRecord lookupRecord[],
253 ContextLookupContext &lookup_context)
255   hb_apply_context_t new_context = *c;
256   return match_input (c,
258 lookup_context.funcs.match, lookup_context.match_data,
259 &new_context.context_length)
260       && apply_lookup (&new_context,
262 lookupCount, lookupRecord,
263 lookup_context.funcs.apply);
/* Interior of the Rule struct (header line not visible): one contextual rule
 * = inputCount match values followed inline by lookupCount LookupRecords. */
268   friend struct RuleSet;
271   inline bool apply (hb_apply_context_t *c, ContextLookupContext &lookup_context) const
274     /* LookupRecords sit immediately after the (inputCount-1) input values */
274     const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (input, input[0].static_size * (inputCount ? inputCount - 1 : 0));
275     return context_lookup (c,
277 lookupCount, lookupRecord,
282   inline bool sanitize (hb_sanitize_context_t *c) {
284     return inputCount.sanitize (c)
285         && lookupCount.sanitize (c)
286         && c->check_range (input,
287 input[0].static_size * inputCount
288 + lookupRecordX[0].static_size * lookupCount);
292   USHORT inputCount; /* Total number of glyphs in input
293 * glyph sequence--includes the first
295   USHORT lookupCount; /* Number of LookupRecords */
296   USHORT input[VAR]; /* Array of match inputs--start with
298   LookupRecord lookupRecordX[VAR]; /* Array of LookupRecords--in
301   DEFINE_SIZE_ARRAY2 (4, input, lookupRecordX);
/* Interior of the RuleSet struct (header line not visible): tries each Rule
 * in preference order and stops at the first that applies. */
306   inline bool apply (hb_apply_context_t *c, ContextLookupContext &lookup_context) const
309     unsigned int num_rules = rule.len;
310     for (unsigned int i = 0; i < num_rules; i++)
312       if ((this+rule[i]).apply (c, lookup_context))
319   inline bool sanitize (hb_sanitize_context_t *c) {
321     return rule.sanitize (c, this);
326   rule; /* Array of Rule tables
327 * ordered by preference */
329   DEFINE_SIZE_ARRAY (2, rule);
/* Contextual substitution/positioning, format 1: rules keyed by exact glyph.
 * Current glyph selects a RuleSet through the Coverage table; rules then
 * match literal glyph ids via match_glyph. */
333 struct ContextFormat1
335   friend struct Context;
338   inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
341     unsigned int index = (this+coverage) (c->buffer->info[c->buffer->i].codepoint);
342     if (likely (index == NOT_COVERED))
345     const RuleSet &rule_set = this+ruleSet[index];
346     struct ContextLookupContext lookup_context = {
347       {match_glyph, apply_func},
350     return rule_set.apply (c, lookup_context);
353   inline bool sanitize (hb_sanitize_context_t *c) {
355     return coverage.sanitize (c, this)
356         && ruleSet.sanitize (c, this);
360   USHORT format; /* Format identifier--format = 1 */
362   coverage; /* Offset to Coverage table--from
363 * beginning of table */
364   OffsetArrayOf<RuleSet>
365   ruleSet; /* Array of RuleSet tables
366 * ordered by Coverage Index */
368   DEFINE_SIZE_ARRAY (6, ruleSet);
/* Contextual lookup, format 2: rules keyed by glyph *class*.  Coverage gates
 * entry, then the glyph's ClassDef class selects the RuleSet; rules match
 * class values via match_class (ClassDef passed as match_data). */
372 struct ContextFormat2
374   friend struct Context;
377   inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
380     unsigned int index = (this+coverage) (c->buffer->info[c->buffer->i].codepoint);
381     if (likely (index == NOT_COVERED))
384     const ClassDef &class_def = this+classDef;
385     index = class_def (c->buffer->info[c->buffer->i].codepoint);
386     const RuleSet &rule_set = this+ruleSet[index];
387     /* LONGTERMTODO: Old code fetches glyph classes at most once and caches
388      * them across subrule lookups. Not sure it's worth it.
390     struct ContextLookupContext lookup_context = {
391       {match_class, apply_func},
394     return rule_set.apply (c, lookup_context);
397   inline bool sanitize (hb_sanitize_context_t *c) {
399     return coverage.sanitize (c, this)
400         && classDef.sanitize (c, this)
401         && ruleSet.sanitize (c, this);
405   USHORT format; /* Format identifier--format = 2 */
407   coverage; /* Offset to Coverage table--from
408 * beginning of table */
410   classDef; /* Offset to glyph ClassDef table--from
411 * beginning of table */
412   OffsetArrayOf<RuleSet>
413   ruleSet; /* Array of RuleSet tables
414 * ordered by class */
416   DEFINE_SIZE_ARRAY (8, ruleSet);
/* Contextual lookup, format 3: a single rule where every input position has
 * its own Coverage table; match_coverage resolves each value as an offset.
 * NOTE(review): truncated view -- early-return bodies and some argument
 * lines are missing. */
420 struct ContextFormat3
422   friend struct Context;
425   inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
428     unsigned int index = (this+coverage[0]) (c->buffer->info[c->buffer->i].codepoint);
429     if (likely (index == NOT_COVERED))
432     /* LookupRecords follow the glyphCount coverage offsets inline */
432     const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverage, coverage[0].static_size * glyphCount);
433     struct ContextLookupContext lookup_context = {
434       {match_coverage, apply_func},
437     return context_lookup (c,
438 glyphCount, (const USHORT *) (coverage + 1),
439 lookupCount, lookupRecord,
443   inline bool sanitize (hb_sanitize_context_t *c) {
445     if (!c->check_struct (this)) return false;
446     unsigned int count = glyphCount;
447     if (!c->check_array (coverage, coverage[0].static_size, count)) return false;
448     for (unsigned int i = 0; i < count; i++)
449       if (!coverage[i].sanitize (c, this)) return false;
450     LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverage, coverage[0].static_size * count);
451     return c->check_array (lookupRecord, lookupRecord[0].static_size, lookupCount);
455   USHORT format; /* Format identifier--format = 3 */
456   USHORT glyphCount; /* Number of glyphs in the input glyph
458   USHORT lookupCount; /* Number of LookupRecords */
460   coverage[VAR]; /* Array of offsets to Coverage
461 * table in glyph sequence order */
462   LookupRecord lookupRecordX[VAR]; /* Array of LookupRecords--in
465   DEFINE_SIZE_ARRAY2 (6, coverage, lookupRecordX);
/* Interior of the Context dispatcher struct (header and union wrapper lines
 * not visible): switches on the format field to the matching subtable. */
471   inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
475     case 1: return u.format1.apply (c, apply_func);
476     case 2: return u.format2.apply (c, apply_func);
477     case 3: return u.format3.apply (c, apply_func);
478     default:return false;
482   inline bool sanitize (hb_sanitize_context_t *c) {
484     if (!u.format.sanitize (c)) return false;
486     case 1: return u.format1.sanitize (c);
487     case 2: return u.format2.sanitize (c);
488     case 3: return u.format3.sanitize (c);
495   USHORT format; /* Format identifier */
496   ContextFormat1 format1;
497   ContextFormat2 format2;
498   ContextFormat3 format3;
503 /* Chaining Contextual lookups */
/* Like ContextLookupContext but with three match_data slots: one each for
 * the backtrack, input, and lookahead sequences.  NOTE(review): the `funcs`
 * member line is missing from this truncated view. */
505 struct ChainContextLookupContext
508   const void *match_data[3];
/* Drives one chaining-context rule: quick length check, then matches the
 * backtrack (backwards over out-buffer), input (forwards, clamping
 * new_context.context_length), and lookahead sequences, and finally applies
 * the LookupRecords.  match_data[0..2] map to backtrack/input/lookahead. */
511 static inline bool chain_context_lookup (hb_apply_context_t *c,
512 unsigned int backtrackCount,
513 const USHORT backtrack[],
514 unsigned int inputCount, /* Including the first glyph (not matched) */
515 const USHORT input[], /* Array of input values--start with second glyph */
516 unsigned int lookaheadCount,
517 const USHORT lookahead[],
518 unsigned int lookupCount,
519 const LookupRecord lookupRecord[],
520 ChainContextLookupContext &lookup_context)
523   /* cheap overall-length rejection before the per-glyph matching */
523   if (unlikely (c->buffer->out_len < backtrackCount ||
524 c->buffer->i + inputCount + lookaheadCount > c->buffer->len ||
525 inputCount + lookaheadCount > c->context_length))
528   hb_apply_context_t new_context = *c;
529   return match_backtrack (c,
530 backtrackCount, backtrack,
531 lookup_context.funcs.match, lookup_context.match_data[0])
534 lookup_context.funcs.match, lookup_context.match_data[1],
535 &new_context.context_length)
536       && match_lookahead (c,
537 lookaheadCount, lookahead,
538 lookup_context.funcs.match, lookup_context.match_data[2],
539 new_context.context_length)
540       && apply_lookup (&new_context,
542 lookupCount, lookupRecord,
543 lookup_context.funcs.apply);
/* Interior of the ChainRule struct (header line not visible): four
 * variable-length arrays laid out back-to-back (backtrack, headless input,
 * lookahead, LookupRecords), located at runtime with StructAfter<>. */
548   friend struct ChainRuleSet;
551   inline bool apply (hb_apply_context_t *c, ChainContextLookupContext &lookup_context) const
554     const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
555     const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
556     const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
557     return chain_context_lookup (c,
558 backtrack.len, backtrack.array,
559 input.len, input.array,
560 lookahead.len, lookahead.array,
561 lookup.len, lookup.array,
567   inline bool sanitize (hb_sanitize_context_t *c) {
569     /* each array must be validated before the next one's offset is trusted */
569     if (!backtrack.sanitize (c)) return false;
570     HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
571     if (!input.sanitize (c)) return false;
572     ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
573     if (!lookahead.sanitize (c)) return false;
574     ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
575     return lookup.sanitize (c);
580   backtrack; /* Array of backtracking values
581 * (to be matched before the input
583   HeadlessArrayOf<USHORT>
584   inputX; /* Array of input values (start with
587   lookaheadX; /* Array of lookahead values's (to be
588 * matched after the input sequence) */
589   ArrayOf<LookupRecord>
590   lookupX; /* Array of LookupRecords--in
/* Interior of the ChainRuleSet struct (header line not visible): tries each
 * ChainRule in preference order, first applicable rule wins. */
598   inline bool apply (hb_apply_context_t *c, ChainContextLookupContext &lookup_context) const
601     unsigned int num_rules = rule.len;
602     for (unsigned int i = 0; i < num_rules; i++)
604       if ((this+rule[i]).apply (c, lookup_context))
611   inline bool sanitize (hb_sanitize_context_t *c) {
613     return rule.sanitize (c, this);
617   OffsetArrayOf<ChainRule>
618   rule; /* Array of ChainRule tables
619 * ordered by preference */
621   DEFINE_SIZE_ARRAY (2, rule);
/* Chaining context, format 1: rules keyed by exact glyph (match_glyph),
 * mirroring ContextFormat1 but with backtrack/lookahead sequences. */
624 struct ChainContextFormat1
626   friend struct ChainContext;
629   inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
632     unsigned int index = (this+coverage) (c->buffer->info[c->buffer->i].codepoint);
633     if (likely (index == NOT_COVERED))
636     const ChainRuleSet &rule_set = this+ruleSet[index];
637     struct ChainContextLookupContext lookup_context = {
638       {match_glyph, apply_func},
641     return rule_set.apply (c, lookup_context);
644   inline bool sanitize (hb_sanitize_context_t *c) {
646     return coverage.sanitize (c, this)
647         && ruleSet.sanitize (c, this);
651   USHORT format; /* Format identifier--format = 1 */
653   coverage; /* Offset to Coverage table--from
654 * beginning of table */
655   OffsetArrayOf<ChainRuleSet>
656   ruleSet; /* Array of ChainRuleSet tables
657 * ordered by Coverage Index */
659   DEFINE_SIZE_ARRAY (6, ruleSet);
/* Chaining context, format 2: class-based, with three separate ClassDef
 * tables (backtrack/input/lookahead) passed as the three match_data slots. */
662 struct ChainContextFormat2
664   friend struct ChainContext;
667   inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
670     unsigned int index = (this+coverage) (c->buffer->info[c->buffer->i].codepoint);
671     if (likely (index == NOT_COVERED))
674     const ClassDef &backtrack_class_def = this+backtrackClassDef;
675     const ClassDef &input_class_def = this+inputClassDef;
676     const ClassDef &lookahead_class_def = this+lookaheadClassDef;
678     index = input_class_def (c->buffer->info[c->buffer->i].codepoint);
679     const ChainRuleSet &rule_set = this+ruleSet[index];
680     /* LONGTERMTODO: Old code fetches glyph classes at most once and caches
681      * them across subrule lookups. Not sure it's worth it.
683     struct ChainContextLookupContext lookup_context = {
684       {match_class, apply_func},
685       {&backtrack_class_def,
687 &lookahead_class_def}
689     return rule_set.apply (c, lookup_context);
692   inline bool sanitize (hb_sanitize_context_t *c) {
694     return coverage.sanitize (c, this)
695         && backtrackClassDef.sanitize (c, this)
696         && inputClassDef.sanitize (c, this)
697         && lookaheadClassDef.sanitize (c, this)
698         && ruleSet.sanitize (c, this);
702   USHORT format; /* Format identifier--format = 2 */
704   coverage; /* Offset to Coverage table--from
705 * beginning of table */
707   backtrackClassDef; /* Offset to glyph ClassDef table
708 * containing backtrack sequence
709 * data--from beginning of table */
711   inputClassDef; /* Offset to glyph ClassDef
712 * table containing input sequence
713 * data--from beginning of table */
715   lookaheadClassDef; /* Offset to glyph ClassDef table
716 * containing lookahead sequence
717 * data--from beginning of table */
718   OffsetArrayOf<ChainRuleSet>
719   ruleSet; /* Array of ChainRuleSet tables
720 * ordered by class */
722   DEFINE_SIZE_ARRAY (12, ruleSet);
/* Chaining context, format 3: per-position Coverage tables for all three
 * sequences; current glyph is tested against the first *input* coverage.
 * NOTE(review): truncated view -- early-return bodies and some argument
 * lines are missing. */
725 struct ChainContextFormat3
727   friend struct ChainContext;
731   inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
734     const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
736     unsigned int index = (this+input[0]) (c->buffer->info[c->buffer->i].codepoint);
737     if (likely (index == NOT_COVERED))
740     const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
741     const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
742     struct ChainContextLookupContext lookup_context = {
743       {match_coverage, apply_func},
746     return chain_context_lookup (c,
747 backtrack.len, (const USHORT *) backtrack.array,
748 input.len, (const USHORT *) input.array + 1,
749 lookahead.len, (const USHORT *) lookahead.array,
750 lookup.len, lookup.array,
755   inline bool sanitize (hb_sanitize_context_t *c) {
757     /* validate each trailing array before computing the next one's position */
757     if (!backtrack.sanitize (c, this)) return false;
758     OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
759     if (!input.sanitize (c, this)) return false;
760     OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
761     if (!lookahead.sanitize (c, this)) return false;
762     ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
763     return lookup.sanitize (c);
767   USHORT format; /* Format identifier--format = 3 */
768   OffsetArrayOf<Coverage>
769   backtrack; /* Array of coverage tables
770 * in backtracking sequence, in glyph
772   OffsetArrayOf<Coverage>
773   inputX ; /* Array of coverage
774 * tables in input sequence, in glyph
776   OffsetArrayOf<Coverage>
777   lookaheadX; /* Array of coverage tables
778 * in lookahead sequence, in glyph
780   ArrayOf<LookupRecord>
781   lookupX; /* Array of LookupRecords--in
784   DEFINE_SIZE_MIN (10);
/* Interior of the ChainContext dispatcher struct (header and union wrapper
 * lines not visible): format-switch to the matching subtable. */
790   inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
794     case 1: return u.format1.apply (c, apply_func);
795     case 2: return u.format2.apply (c, apply_func);
796     case 3: return u.format3.apply (c, apply_func);
797     default:return false;
801   inline bool sanitize (hb_sanitize_context_t *c) {
803     if (!u.format.sanitize (c)) return false;
805     case 1: return u.format1.sanitize (c);
806     case 2: return u.format2.sanitize (c);
807     case 3: return u.format3.sanitize (c);
814   USHORT format; /* Format identifier */
815   ChainContextFormat1 format1;
816   ChainContextFormat2 format2;
817   ChainContextFormat3 format3;
/* Extension subtable, format 1: indirection record giving the real lookup
 * type and a 32-bit offset to the actual subtable. */
822 struct ExtensionFormat1
824   friend struct Extension;
827   inline unsigned int get_type (void) const { return extensionLookupType; }
828   inline unsigned int get_offset (void) const { return extensionOffset; }
830   inline bool sanitize (hb_sanitize_context_t *c) {
832     return c->check_struct (this);
836   USHORT format; /* Format identifier. Set to 1. */
837   USHORT extensionLookupType; /* Lookup type of subtable referenced
838 * by ExtensionOffset (i.e. the
839 * extension subtable). */
840   ULONG extensionOffset; /* Offset to the extension subtable,
841 * of lookup type subtable. */
843   DEFINE_SIZE_STATIC (8);
/* Interior of the Extension dispatcher struct (header/union lines not
 * visible): format-switch wrappers over ExtensionFormat1.  NOTE(review):
 * default-case lines are missing from this truncated view. */
848   inline unsigned int get_type (void) const
851     case 1: return u.format1.get_type ();
855   inline unsigned int get_offset (void) const
858     case 1: return u.format1.get_offset ();
863   inline bool sanitize (hb_sanitize_context_t *c) {
865     if (!u.format.sanitize (c)) return false;
867     case 1: return u.format1.sanitize (c);
874   USHORT format; /* Format identifier */
875   ExtensionFormat1 format1;
/* Interior of the common GSUB/GPOS top-level table struct (declaration line
 * not visible): version header plus offsets to the ScriptList, FeatureList,
 * and LookupList, with thin accessor wrappers over each. */
886   static const hb_tag_t GSUBTag = HB_OT_TAG_GSUB;
887   static const hb_tag_t GPOSTag = HB_OT_TAG_GPOS;
889   /* ScriptList accessors */
889   inline unsigned int get_script_count (void) const
890   { return (this+scriptList).len; }
891   inline const Tag& get_script_tag (unsigned int i) const
892   { return (this+scriptList).get_tag (i); }
893   inline unsigned int get_script_tags (unsigned int start_offset,
894 unsigned int *script_count /* IN/OUT */,
895 hb_tag_t *script_tags /* OUT */) const
896   { return (this+scriptList).get_tags (start_offset, script_count, script_tags); }
897   inline const Script& get_script (unsigned int i) const
898   { return (this+scriptList)[i]; }
899   inline bool find_script_index (hb_tag_t tag, unsigned int *index) const
900   { return (this+scriptList).find_index (tag, index); }
902   /* FeatureList accessors */
902   inline unsigned int get_feature_count (void) const
903   { return (this+featureList).len; }
904   inline const Tag& get_feature_tag (unsigned int i) const
905   { return (this+featureList).get_tag (i); }
906   inline unsigned int get_feature_tags (unsigned int start_offset,
907 unsigned int *feature_count /* IN/OUT */,
908 hb_tag_t *feature_tags /* OUT */) const
909   { return (this+featureList).get_tags (start_offset, feature_count, feature_tags); }
910   inline const Feature& get_feature (unsigned int i) const
911   { return (this+featureList)[i]; }
912   inline bool find_feature_index (hb_tag_t tag, unsigned int *index) const
913   { return (this+featureList).find_index (tag, index); }
915   /* LookupList accessors */
915   inline unsigned int get_lookup_count (void) const
916   { return (this+lookupList).len; }
917   inline const Lookup& get_lookup (unsigned int i) const
918   { return (this+lookupList)[i]; }
920   inline bool sanitize (hb_sanitize_context_t *c) {
922     /* only major version 1 is understood */
922     return version.sanitize (c) && likely (version.major == 1)
923         && scriptList.sanitize (c, this)
924         && featureList.sanitize (c, this)
925         && lookupList.sanitize (c, this);
929   FixedVersion version; /* Version of the GSUB/GPOS table--initially set
932   scriptList; /* ScriptList table */
933   OffsetTo<FeatureList>
934   featureList; /* FeatureList table */
936   lookupList; /* LookupList table */
938   DEFINE_SIZE_STATIC (10);
942 #endif /* HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH */