2 * Copyright (C) 2007,2008,2009,2010 Red Hat, Inc.
4 * This is part of HarfBuzz, a text shaping library.
6 * Permission is hereby granted, without written agreement and without
7 * license or royalty fees, to use, copy, modify, and distribute this
8 * software and its documentation for any purpose, provided that the
9 * above copyright notice and the following two paragraphs appear in
10 * all copies of this software.
12 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
13 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
14 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
15 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
 * DAMAGE.
18 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
19 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
20 * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
21 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
22 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
24 * Red Hat Author(s): Behdad Esfahbod
27 #ifndef HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH
28 #define HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH
30 #include "hb-buffer-private.h"
31 #include "hb-ot-layout-gdef-private.hh"
34 #ifndef HB_DEBUG_APPLY
35 #define HB_DEBUG_APPLY HB_DEBUG+0
38 #define TRACE_APPLY() \
39 hb_trace_t<HB_DEBUG_APPLY> trace (&context->debug_depth); \
40 trace.log ("APPLY", HB_FUNC, this);
43 struct hb_apply_context_t
45 unsigned int debug_depth;
46 hb_ot_layout_context_t *layout;
48 unsigned int context_length;
49 unsigned int nesting_level_left;
50 unsigned int lookup_flag;
51 unsigned int property; /* propety of first glyph (TODO remove) */
58 #define BUFFER context->buffer
61 typedef bool (*match_func_t) (hb_codepoint_t glyph_id, const USHORT &value, const char *data);
62 typedef bool (*apply_lookup_func_t) (hb_apply_context_t *context, unsigned int lookup_index);
67 apply_lookup_func_t apply;
71 static inline bool match_glyph (hb_codepoint_t glyph_id, const USHORT &value, const char *data HB_UNUSED)
73 return glyph_id == value;
76 static inline bool match_class (hb_codepoint_t glyph_id, const USHORT &value, const char *data)
78 const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
79 return class_def.get_class (glyph_id) == value;
82 static inline bool match_coverage (hb_codepoint_t glyph_id, const USHORT &value, const char *data)
84 const OffsetTo<Coverage> &coverage = (const OffsetTo<Coverage>&)value;
85 return (data+coverage) (glyph_id) != NOT_COVERED;
89 static inline bool match_input (hb_apply_context_t *context,
90 unsigned int count, /* Including the first glyph (not matched) */
91 const USHORT input[], /* Array of input values--start with second glyph */
92 match_func_t match_func,
93 const char *match_data,
94 unsigned int *context_length_out)
97 unsigned int end = MIN (context->buffer->in_length, context->buffer->in_pos + context->context_length);
98 if (unlikely (context->buffer->in_pos + count > end))
101 for (i = 1, j = context->buffer->in_pos + 1; i < count; i++, j++)
103 while (_hb_ot_layout_skip_mark (context->layout->face, IN_INFO (j), context->lookup_flag, NULL))
105 if (unlikely (j + count - i == end))
110 if (likely (!match_func (IN_GLYPH (j), input[i - 1], match_data)))
114 *context_length_out = j - context->buffer->in_pos;
119 static inline bool match_backtrack (hb_apply_context_t *context,
121 const USHORT backtrack[],
122 match_func_t match_func,
123 const char *match_data)
125 if (unlikely (context->buffer->out_pos < count))
128 for (unsigned int i = 0, j = context->buffer->out_pos - 1; i < count; i++, j--)
130 while (_hb_ot_layout_skip_mark (context->layout->face, OUT_INFO (j), context->lookup_flag, NULL))
132 if (unlikely (j + 1 == count - i))
137 if (likely (!match_func (OUT_GLYPH (j), backtrack[i], match_data)))
144 static inline bool match_lookahead (hb_apply_context_t *context,
146 const USHORT lookahead[],
147 match_func_t match_func,
148 const char *match_data,
152 unsigned int end = MIN (context->buffer->in_length, context->buffer->in_pos + context->context_length);
153 if (unlikely (context->buffer->in_pos + offset + count > end))
156 for (i = 0, j = context->buffer->in_pos + offset; i < count; i++, j++)
158 while (_hb_ot_layout_skip_mark (context->layout->face, OUT_INFO (j), context->lookup_flag, NULL))
160 if (unlikely (j + count - i == end))
165 if (likely (!match_func (IN_GLYPH (j), lookahead[i], match_data)))
175 inline bool sanitize (hb_sanitize_context_t *context) {
177 return context->check_struct (this);
180 USHORT sequenceIndex; /* Index into current glyph
181 * sequence--first glyph = 0 */
182 USHORT lookupListIndex; /* Lookup to apply to that
183 * position--zero--based */
185 DEFINE_SIZE_STATIC (4);
188 static inline bool apply_lookup (hb_apply_context_t *context,
189 unsigned int count, /* Including the first glyph */
190 unsigned int lookupCount,
191 const LookupRecord lookupRecord[], /* Array of LookupRecords--in design order */
192 apply_lookup_func_t apply_func)
194 unsigned int end = MIN (context->buffer->in_length, context->buffer->in_pos + context->context_length);
195 if (unlikely (context->buffer->in_pos + count > end))
198 /* TODO We don't support lookupRecord arrays that are not increasing:
199 * Should be easy for in_place ones at least. */
201 /* Note: If sublookup is reverse, i will underflow after the first loop
202 * and we jump out of it. Not entirely disastrous. So we don't check
203 * for reverse lookup here.
205 for (unsigned int i = 0; i < count; /* NOP */)
207 while (_hb_ot_layout_skip_mark (context->layout->face, IN_CURINFO (), context->lookup_flag, NULL))
209 if (unlikely (context->buffer->in_pos == end))
211 /* No lookup applied for this index */
212 _hb_buffer_next_glyph (context->buffer);
215 if (lookupCount && i == lookupRecord->sequenceIndex)
217 unsigned int old_pos = context->buffer->in_pos;
220 bool done = apply_func (context, lookupRecord->lookupListIndex);
224 /* Err, this is wrong if the lookup jumped over some glyphs */
225 i += context->buffer->in_pos - old_pos;
226 if (unlikely (context->buffer->in_pos == end))
235 /* No lookup applied for this index */
236 _hb_buffer_next_glyph (context->buffer);
245 /* Contextual lookups */
247 struct ContextLookupContext
250 const char *match_data;
253 static inline bool context_lookup (hb_apply_context_t *context,
254 unsigned int inputCount, /* Including the first glyph (not matched) */
255 const USHORT input[], /* Array of input values--start with second glyph */
256 unsigned int lookupCount,
257 const LookupRecord lookupRecord[],
258 ContextLookupContext &lookup_context)
260 hb_apply_context_t new_context = *context;
261 return match_input (context,
263 lookup_context.funcs.match, lookup_context.match_data,
264 &new_context.context_length)
265 && apply_lookup (&new_context,
267 lookupCount, lookupRecord,
268 lookup_context.funcs.apply);
273 friend struct RuleSet;
276 inline bool apply (hb_apply_context_t *context, ContextLookupContext &lookup_context) const
279 const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (input, input[0].static_size * (inputCount ? inputCount - 1 : 0));
280 return context_lookup (context,
282 lookupCount, lookupRecord,
287 inline bool sanitize (hb_sanitize_context_t *context) {
289 return inputCount.sanitize (context)
290 && lookupCount.sanitize (context)
291 && context->check_range (input,
292 input[0].static_size * inputCount
293 + lookupRecordX[0].static_size * lookupCount);
297 USHORT inputCount; /* Total number of glyphs in input
298 * glyph sequence--includes the first
300 USHORT lookupCount; /* Number of LookupRecords */
301 USHORT input[VAR]; /* Array of match inputs--start with
303 LookupRecord lookupRecordX[VAR]; /* Array of LookupRecords--in
306 DEFINE_SIZE_VAR2 (4, USHORT, LookupRecord);
311 inline bool apply (hb_apply_context_t *context, ContextLookupContext &lookup_context) const
314 unsigned int num_rules = rule.len;
315 for (unsigned int i = 0; i < num_rules; i++)
317 if ((this+rule[i]).apply (context, lookup_context))
324 inline bool sanitize (hb_sanitize_context_t *context) {
326 return rule.sanitize (context, this);
331 rule; /* Array of Rule tables
332 * ordered by preference */
336 struct ContextFormat1
338 friend struct Context;
341 inline bool apply (hb_apply_context_t *context, apply_lookup_func_t apply_func) const
344 unsigned int index = (this+coverage) (IN_CURGLYPH ());
345 if (likely (index == NOT_COVERED))
348 const RuleSet &rule_set = this+ruleSet[index];
349 struct ContextLookupContext lookup_context = {
350 {match_glyph, apply_func},
353 return rule_set.apply (context, lookup_context);
356 inline bool sanitize (hb_sanitize_context_t *context) {
358 return coverage.sanitize (context, this)
359 && ruleSet.sanitize (context, this);
363 USHORT format; /* Format identifier--format = 1 */
365 coverage; /* Offset to Coverage table--from
366 * beginning of table */
367 OffsetArrayOf<RuleSet>
368 ruleSet; /* Array of RuleSet tables
369 * ordered by Coverage Index */
371 DEFINE_SIZE_VAR (6, OffsetTo<RuleSet>);
375 struct ContextFormat2
377 friend struct Context;
380 inline bool apply (hb_apply_context_t *context, apply_lookup_func_t apply_func) const
383 unsigned int index = (this+coverage) (IN_CURGLYPH ());
384 if (likely (index == NOT_COVERED))
387 const ClassDef &class_def = this+classDef;
388 index = class_def (IN_CURGLYPH ());
389 const RuleSet &rule_set = this+ruleSet[index];
390 /* LONGTERMTODO: Old code fetches glyph classes at most once and caches
391 * them across subrule lookups. Not sure it's worth it.
393 struct ContextLookupContext lookup_context = {
394 {match_class, apply_func},
397 return rule_set.apply (context, lookup_context);
400 inline bool sanitize (hb_sanitize_context_t *context) {
402 return coverage.sanitize (context, this)
403 && classDef.sanitize (context, this)
404 && ruleSet.sanitize (context, this);
408 USHORT format; /* Format identifier--format = 2 */
410 coverage; /* Offset to Coverage table--from
411 * beginning of table */
413 classDef; /* Offset to glyph ClassDef table--from
414 * beginning of table */
415 OffsetArrayOf<RuleSet>
416 ruleSet; /* Array of RuleSet tables
417 * ordered by class */
419 DEFINE_SIZE_VAR (8, OffsetTo<RuleSet>);
423 struct ContextFormat3
425 friend struct Context;
428 inline bool apply (hb_apply_context_t *context, apply_lookup_func_t apply_func) const
431 unsigned int index = (this+coverage[0]) (IN_CURGLYPH ());
432 if (likely (index == NOT_COVERED))
435 const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverage, coverage[0].static_size * glyphCount);
436 struct ContextLookupContext lookup_context = {
437 {match_coverage, apply_func},
440 return context_lookup (context,
441 glyphCount, (const USHORT *) (coverage + 1),
442 lookupCount, lookupRecord,
446 inline bool sanitize (hb_sanitize_context_t *context) {
448 if (!context->check_struct (this)) return false;
449 unsigned int count = glyphCount;
450 if (!context->check_array (coverage, coverage[0].static_size, count)) return false;
451 for (unsigned int i = 0; i < count; i++)
452 if (!coverage[i].sanitize (context, this)) return false;
453 LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverage, coverage[0].static_size * count);
454 return context->check_array (lookupRecord, lookupRecord[0].static_size, lookupCount);
458 USHORT format; /* Format identifier--format = 3 */
459 USHORT glyphCount; /* Number of glyphs in the input glyph
461 USHORT lookupCount; /* Number of LookupRecords */
463 coverage[VAR]; /* Array of offsets to Coverage
464 * table in glyph sequence order */
465 LookupRecord lookupRecordX[VAR]; /* Array of LookupRecords--in
468 DEFINE_SIZE_VAR2 (6, OffsetTo<Coverage>, LookupRecord);
474 inline bool apply (hb_apply_context_t *context, apply_lookup_func_t apply_func) const
478 case 1: return u.format1->apply (context, apply_func);
479 case 2: return u.format2->apply (context, apply_func);
480 case 3: return u.format3->apply (context, apply_func);
481 default:return false;
485 inline bool sanitize (hb_sanitize_context_t *context) {
487 if (!u.format.sanitize (context)) return false;
489 case 1: return u.format1->sanitize (context);
490 case 2: return u.format2->sanitize (context);
491 case 3: return u.format3->sanitize (context);
498 USHORT format; /* Format identifier */
499 ContextFormat1 format1[VAR];
500 ContextFormat2 format2[VAR];
501 ContextFormat3 format3[VAR];
506 /* Chaining Contextual lookups */
508 struct ChainContextLookupContext
511 const char *match_data[3];
514 static inline bool chain_context_lookup (hb_apply_context_t *context,
515 unsigned int backtrackCount,
516 const USHORT backtrack[],
517 unsigned int inputCount, /* Including the first glyph (not matched) */
518 const USHORT input[], /* Array of input values--start with second glyph */
519 unsigned int lookaheadCount,
520 const USHORT lookahead[],
521 unsigned int lookupCount,
522 const LookupRecord lookupRecord[],
523 ChainContextLookupContext &lookup_context)
526 if (unlikely (context->buffer->out_pos < backtrackCount ||
527 context->buffer->in_pos + inputCount + lookaheadCount > context->buffer->in_length ||
528 inputCount + lookaheadCount > context->context_length))
531 hb_apply_context_t new_context = *context;
532 return match_backtrack (context,
533 backtrackCount, backtrack,
534 lookup_context.funcs.match, lookup_context.match_data[0])
535 && match_input (context,
537 lookup_context.funcs.match, lookup_context.match_data[1],
538 &new_context.context_length)
539 && match_lookahead (context,
540 lookaheadCount, lookahead,
541 lookup_context.funcs.match, lookup_context.match_data[2],
542 new_context.context_length)
543 && apply_lookup (&new_context,
545 lookupCount, lookupRecord,
546 lookup_context.funcs.apply);
551 friend struct ChainRuleSet;
554 inline bool apply (hb_apply_context_t *context, ChainContextLookupContext &lookup_context) const
557 const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
558 const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
559 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
560 return chain_context_lookup (context,
561 backtrack.len, backtrack.array(),
562 input.len, input.array(),
563 lookahead.len, lookahead.array(),
564 lookup.len, lookup.array(),
570 inline bool sanitize (hb_sanitize_context_t *context) {
572 if (!backtrack.sanitize (context)) return false;
573 HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
574 if (!input.sanitize (context)) return false;
575 ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
576 if (!lookahead.sanitize (context)) return false;
577 ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
578 return lookup.sanitize (context);
583 backtrack; /* Array of backtracking values
584 * (to be matched before the input
586 HeadlessArrayOf<USHORT>
587 inputX; /* Array of input values (start with
590 lookaheadX; /* Array of lookahead values's (to be
591 * matched after the input sequence) */
592 ArrayOf<LookupRecord>
593 lookupX; /* Array of LookupRecords--in
601 inline bool apply (hb_apply_context_t *context, ChainContextLookupContext &lookup_context) const
604 unsigned int num_rules = rule.len;
605 for (unsigned int i = 0; i < num_rules; i++)
607 if ((this+rule[i]).apply (context, lookup_context))
614 inline bool sanitize (hb_sanitize_context_t *context) {
616 return rule.sanitize (context, this);
620 OffsetArrayOf<ChainRule>
621 rule; /* Array of ChainRule tables
622 * ordered by preference */
624 DEFINE_SIZE_VAR (2, OffsetTo<ChainRule>);
627 struct ChainContextFormat1
629 friend struct ChainContext;
632 inline bool apply (hb_apply_context_t *context, apply_lookup_func_t apply_func) const
635 unsigned int index = (this+coverage) (IN_CURGLYPH ());
636 if (likely (index == NOT_COVERED))
639 const ChainRuleSet &rule_set = this+ruleSet[index];
640 struct ChainContextLookupContext lookup_context = {
641 {match_glyph, apply_func},
644 return rule_set.apply (context, lookup_context);
647 inline bool sanitize (hb_sanitize_context_t *context) {
649 return coverage.sanitize (context, this)
650 && ruleSet.sanitize (context, this);
654 USHORT format; /* Format identifier--format = 1 */
656 coverage; /* Offset to Coverage table--from
657 * beginning of table */
658 OffsetArrayOf<ChainRuleSet>
659 ruleSet; /* Array of ChainRuleSet tables
660 * ordered by Coverage Index */
662 DEFINE_SIZE_VAR (6, OffsetTo<ChainRuleSet>);
665 struct ChainContextFormat2
667 friend struct ChainContext;
670 inline bool apply (hb_apply_context_t *context, apply_lookup_func_t apply_func) const
673 unsigned int index = (this+coverage) (IN_CURGLYPH ());
674 if (likely (index == NOT_COVERED))
677 const ClassDef &backtrack_class_def = this+backtrackClassDef;
678 const ClassDef &input_class_def = this+inputClassDef;
679 const ClassDef &lookahead_class_def = this+lookaheadClassDef;
681 index = input_class_def (IN_CURGLYPH ());
682 const ChainRuleSet &rule_set = this+ruleSet[index];
683 /* LONGTERMTODO: Old code fetches glyph classes at most once and caches
684 * them across subrule lookups. Not sure it's worth it.
686 struct ChainContextLookupContext lookup_context = {
687 {match_class, apply_func},
688 {CharP(&backtrack_class_def),
689 CharP(&input_class_def),
690 CharP(&lookahead_class_def)}
692 return rule_set.apply (context, lookup_context);
695 inline bool sanitize (hb_sanitize_context_t *context) {
697 return coverage.sanitize (context, this)
698 && backtrackClassDef.sanitize (context, this)
699 && inputClassDef.sanitize (context, this)
700 && lookaheadClassDef.sanitize (context, this)
701 && ruleSet.sanitize (context, this);
705 USHORT format; /* Format identifier--format = 2 */
707 coverage; /* Offset to Coverage table--from
708 * beginning of table */
710 backtrackClassDef; /* Offset to glyph ClassDef table
711 * containing backtrack sequence
712 * data--from beginning of table */
714 inputClassDef; /* Offset to glyph ClassDef
715 * table containing input sequence
716 * data--from beginning of table */
718 lookaheadClassDef; /* Offset to glyph ClassDef table
719 * containing lookahead sequence
720 * data--from beginning of table */
721 OffsetArrayOf<ChainRuleSet>
722 ruleSet; /* Array of ChainRuleSet tables
723 * ordered by class */
725 DEFINE_SIZE_VAR (12, OffsetTo<ChainRuleSet>);
728 struct ChainContextFormat3
730 friend struct ChainContext;
734 inline bool apply (hb_apply_context_t *context, apply_lookup_func_t apply_func) const
737 const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
739 unsigned int index = (this+input[0]) (IN_CURGLYPH ());
740 if (likely (index == NOT_COVERED))
743 const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
744 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
745 struct ChainContextLookupContext lookup_context = {
746 {match_coverage, apply_func},
747 {CharP(this), CharP(this), CharP(this)}
749 return chain_context_lookup (context,
750 backtrack.len, (const USHORT *) backtrack.array(),
751 input.len, (const USHORT *) input.array() + 1,
752 lookahead.len, (const USHORT *) lookahead.array(),
753 lookup.len, lookup.array(),
758 inline bool sanitize (hb_sanitize_context_t *context) {
760 if (!backtrack.sanitize (context, this)) return false;
761 OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
762 if (!input.sanitize (context, this)) return false;
763 OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
764 if (!lookahead.sanitize (context, this)) return false;
765 ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
766 return lookup.sanitize (context);
770 USHORT format; /* Format identifier--format = 3 */
771 OffsetArrayOf<Coverage>
772 backtrack; /* Array of coverage tables
773 * in backtracking sequence, in glyph
775 OffsetArrayOf<Coverage>
776 inputX ; /* Array of coverage
777 * tables in input sequence, in glyph
779 OffsetArrayOf<Coverage>
780 lookaheadX; /* Array of coverage tables
781 * in lookahead sequence, in glyph
783 ArrayOf<LookupRecord>
784 lookupX; /* Array of LookupRecords--in
787 DEFINE_SIZE_MIN (10);
793 inline bool apply (hb_apply_context_t *context, apply_lookup_func_t apply_func) const
797 case 1: return u.format1->apply (context, apply_func);
798 case 2: return u.format2->apply (context, apply_func);
799 case 3: return u.format3->apply (context, apply_func);
800 default:return false;
804 inline bool sanitize (hb_sanitize_context_t *context) {
806 if (!u.format.sanitize (context)) return false;
808 case 1: return u.format1->sanitize (context);
809 case 2: return u.format2->sanitize (context);
810 case 3: return u.format3->sanitize (context);
817 USHORT format; /* Format identifier */
818 ChainContextFormat1 format1[VAR];
819 ChainContextFormat2 format2[VAR];
820 ChainContextFormat3 format3[VAR];
825 struct ExtensionFormat1
827 friend struct Extension;
830 inline unsigned int get_type (void) const { return extensionLookupType; }
831 inline unsigned int get_offset (void) const { return extensionOffset; }
833 inline bool sanitize (hb_sanitize_context_t *context) {
835 return context->check_struct (this);
839 USHORT format; /* Format identifier. Set to 1. */
840 USHORT extensionLookupType; /* Lookup type of subtable referenced
841 * by ExtensionOffset (i.e. the
842 * extension subtable). */
843 ULONG extensionOffset; /* Offset to the extension subtable,
844 * of lookup type subtable. */
846 DEFINE_SIZE_STATIC (8);
851 inline unsigned int get_type (void) const
854 case 1: return u.format1->get_type ();
858 inline unsigned int get_offset (void) const
861 case 1: return u.format1->get_offset ();
866 inline bool sanitize (hb_sanitize_context_t *context) {
868 if (!u.format.sanitize (context)) return false;
870 case 1: return u.format1->sanitize (context);
877 USHORT format; /* Format identifier */
878 ExtensionFormat1 format1[VAR];
889 static const hb_tag_t GSUBTag = HB_OT_TAG_GSUB;
890 static const hb_tag_t GPOSTag = HB_OT_TAG_GPOS;
892 inline unsigned int get_script_count (void) const
893 { return (this+scriptList).len; }
894 inline const Tag& get_script_tag (unsigned int i) const
895 { return (this+scriptList).get_tag (i); }
896 inline unsigned int get_script_tags (unsigned int start_offset,
897 unsigned int *script_count /* IN/OUT */,
898 hb_tag_t *script_tags /* OUT */) const
899 { return (this+scriptList).get_tags (start_offset, script_count, script_tags); }
900 inline const Script& get_script (unsigned int i) const
901 { return (this+scriptList)[i]; }
902 inline bool find_script_index (hb_tag_t tag, unsigned int *index) const
903 { return (this+scriptList).find_index (tag, index); }
905 inline unsigned int get_feature_count (void) const
906 { return (this+featureList).len; }
907 inline const Tag& get_feature_tag (unsigned int i) const
908 { return (this+featureList).get_tag (i); }
909 inline unsigned int get_feature_tags (unsigned int start_offset,
910 unsigned int *feature_count /* IN/OUT */,
911 hb_tag_t *feature_tags /* OUT */) const
912 { return (this+featureList).get_tags (start_offset, feature_count, feature_tags); }
913 inline const Feature& get_feature (unsigned int i) const
914 { return (this+featureList)[i]; }
915 inline bool find_feature_index (hb_tag_t tag, unsigned int *index) const
916 { return (this+featureList).find_index (tag, index); }
918 inline unsigned int get_lookup_count (void) const
919 { return (this+lookupList).len; }
920 inline const Lookup& get_lookup (unsigned int i) const
921 { return (this+lookupList)[i]; }
923 inline bool sanitize (hb_sanitize_context_t *context) {
925 return version.sanitize (context) && likely (version.major == 1)
926 && scriptList.sanitize (context, this)
927 && featureList.sanitize (context, this)
928 && lookupList.sanitize (context, this);
932 FixedVersion version; /* Version of the GSUB/GPOS table--initially set
935 scriptList; /* ScriptList table */
936 OffsetTo<FeatureList>
937 featureList; /* FeatureList table */
939 lookupList; /* LookupList table */
941 DEFINE_SIZE_STATIC (10);
945 #endif /* HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH */