2 * Copyright © 2007,2008,2009,2010 Red Hat, Inc.
3 * Copyright © 2010 Google, Inc.
5 * This is part of HarfBuzz, a text shaping library.
7 * Permission is hereby granted, without written agreement and without
8 * license or royalty fees, to use, copy, modify, and distribute this
9 * software and its documentation for any purpose, provided that the
10 * above copyright notice and the following two paragraphs appear in
11 * all copies of this software.
13 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
14 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
15 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
16 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
19 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
20 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
21 * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
22 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
23 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
25 * Red Hat Author(s): Behdad Esfahbod
26 * Google Author(s): Behdad Esfahbod
29 #ifndef HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH
30 #define HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH
32 #include "hb-buffer-private.hh"
33 #include "hb-ot-layout-gdef-private.hh"
37 /* buffer var allocations */
/* Per-glyph scratch bytes in the buffer's var2 slot, used to track ligature
 * membership during GSUB processing. */
38 #define lig_id() var2.u8[2] /* unique ligature id */
39 #define lig_comp() var2.u8[3] /* component number in the ligature (0 = base) */
/* Draw a new ligature id from the buffer's serial counter; a zero result is
 * retried so that 0 never escapes as an id.  NOTE(review): this view is
 * missing the function's return statement and closing brace. */
41 static inline uint8_t allocate_lig_id (hb_buffer_t *buffer) {
42 uint8_t lig_id = buffer->next_serial ();
43 if (unlikely (!lig_id)) lig_id = buffer->next_serial (); /* in case of overflow */
/* Debug level for the apply pass; defaults to the global HB_DEBUG level. */
49 #ifndef HB_DEBUG_APPLY
50 #define HB_DEBUG_APPLY (HB_DEBUG+0)
/* Emits a scoped "APPLY" trace entry keyed off c->debug_depth; expected to be
 * placed at the top of each apply() method. */
53 #define TRACE_APPLY() \
54 hb_auto_trace_t<HB_DEBUG_APPLY> trace (&c->debug_depth, "APPLY", this, NULL, HB_FUNC);
/* Mutable state threaded through every lookup application: the buffer being
 * shaped plus the matching constraints (mask, remaining context length,
 * recursion budget, lookup flags).  NOTE(review): several member declarations
 * (e.g. the buffer/face pointers) are not visible in this truncated view. */
59 struct hb_apply_context_t
61 unsigned int debug_depth;
65 hb_direction_t direction;
66 hb_mask_t lookup_mask;
67 unsigned int context_length;
68 unsigned int nesting_level_left;
69 unsigned int lookup_props;
70 unsigned int property; /* property of first glyph */
/* Thin forwarding wrappers over the buffer's output primitives: */
73 inline void replace_glyph (hb_codepoint_t glyph_index) const
76 buffer->replace_glyph (glyph_index);
78 inline void replace_glyphs_be16 (unsigned int num_in,
80 const uint16_t *glyph_data_be) const
83 buffer->replace_glyphs_be16 (num_in, num_out, glyph_data_be);
/* Record a guessed glyph class in the glyph's cached-props slot. */
86 inline void guess_glyph_class (unsigned int klass)
88 /* XXX if ! has gdef */
89 buffer->info[buffer->idx].props_cache() = klass;
/* Invalidate the cached glyph properties for the current glyph. */
93 inline void clear_property (void) const
96 buffer->info[buffer->idx].props_cache() = 0;
/* Hooks parameterizing the (Chain)Context machinery: match_func_t tests one
 * glyph against a match value (glyph id, class value, or coverage offset,
 * depending on subtable format); apply_lookup_func_t recurses into a nested
 * lookup by index. */
102 typedef bool (*match_func_t) (hb_codepoint_t glyph_id, const USHORT &value, const void *data);
103 typedef bool (*apply_lookup_func_t) (hb_apply_context_t *c, unsigned int lookup_index);
/* NOTE(review): the enclosing struct's opening lines are not visible here. */
108 apply_lookup_func_t apply;
/* Format-1 matcher: the match value is a literal glyph id. */
112 static inline bool match_glyph (hb_codepoint_t glyph_id, const USHORT &value, const void *data HB_UNUSED)
114 return glyph_id == value;
/* Format-2 matcher: the match value is a class; data points to the ClassDef
 * used to classify the glyph. */
117 static inline bool match_class (hb_codepoint_t glyph_id, const USHORT &value, const void *data)
119 const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
120 return class_def.get_class (glyph_id) == value;
/* Format-3 matcher: the match value is an offset to a Coverage table (relative
 * to data); the glyph matches if it is covered. */
123 static inline bool match_coverage (hb_codepoint_t glyph_id, const USHORT &value, const void *data)
125 const OffsetTo<Coverage> &coverage = (const OffsetTo<Coverage>&)value;
126 return (data+coverage) (glyph_id) != NOT_COVERED;
/* Match `count - 1` input values (the first glyph is the current one and is
 * not tested) against the glyphs following buffer->idx, skipping ignorable
 * marks per lookup_props.  On success writes the matched span length to
 * *context_length_out.  NOTE(review): the early-return/failure lines are
 * missing from this truncated view. */
130 static inline bool match_input (hb_apply_context_t *c,
131 unsigned int count, /* Including the first glyph (not matched) */
132 const USHORT input[], /* Array of input values--start with second glyph */
133 match_func_t match_func,
134 const void *match_data,
135 unsigned int *context_length_out)
/* Clamp the scan to both the buffer end and the caller's context budget. */
138 unsigned int end = MIN (c->buffer->len, c->buffer->idx + c->context_length);
139 if (unlikely (c->buffer->idx + count > end))
142 for (i = 1, j = c->buffer->idx + 1; i < count; i++, j++)
144 while (_hb_ot_layout_skip_mark (c->face, &c->buffer->info[j], c->lookup_props, NULL))
146 if (unlikely (j + count - i == end))
151 if (likely (!match_func (c->buffer->info[j].codepoint, input[i - 1], match_data)))
155 *context_length_out = j - c->buffer->idx;
/* Match `count` backtrack values against the already-output glyphs, walking
 * out_info backwards from the most recent, skipping ignorable marks.
 * NOTE(review): failure-return lines are missing from this truncated view. */
160 static inline bool match_backtrack (hb_apply_context_t *c,
162 const USHORT backtrack[],
163 match_func_t match_func,
164 const void *match_data)
166 if (unlikely (c->buffer->backtrack_len () < count))
169 for (unsigned int i = 0, j = c->buffer->backtrack_len () - 1; i < count; i++, j--)
171 while (_hb_ot_layout_skip_mark (c->face, &c->buffer->out_info[j], c->lookup_props, NULL))
173 if (unlikely (j + 1 == count - i))
178 if (likely (!match_func (c->buffer->out_info[j].codepoint, backtrack[i], match_data)))
/* Match `count` lookahead values against glyphs starting `offset` past
 * buffer->idx, skipping ignorable marks; mirrors match_input but writes no
 * output length.  NOTE(review): the offset parameter declaration and the
 * failure-return lines are missing from this truncated view. */
185 static inline bool match_lookahead (hb_apply_context_t *c,
187 const USHORT lookahead[],
188 match_func_t match_func,
189 const void *match_data,
193 unsigned int end = MIN (c->buffer->len, c->buffer->idx + c->context_length);
194 if (unlikely (c->buffer->idx + offset + count > end))
197 for (i = 0, j = c->buffer->idx + offset; i < count; i++, j++)
199 while (_hb_ot_layout_skip_mark (c->face, &c->buffer->info[j], c->lookup_props, NULL))
201 if (unlikely (j + count - i == end))
206 if (likely (!match_func (c->buffer->info[j].codepoint, lookahead[i], match_data)))
/* OpenType LookupRecord: (sequence index, lookup index) pair telling which
 * nested lookup to apply at which position of a matched context.
 * NOTE(review): the `struct LookupRecord` header line is not visible here. */
218 inline bool sanitize (hb_sanitize_context_t *c) {
220 return c->check_struct (this);
223 USHORT sequenceIndex; /* Index into current glyph
224 * sequence--first glyph = 0 */
225 USHORT lookupListIndex; /* Lookup to apply to that
226 * position--zero--based */
228 DEFINE_SIZE_STATIC (4);
/* Walk the matched span of `count` glyphs, applying the nested lookups named
 * by lookupRecord[] at their sequenceIndex positions and copying the other
 * glyphs through unchanged.  Requires the record array to be sorted by
 * increasing sequenceIndex (see TODO below).  NOTE(review): several lines
 * (record-pointer advance, loop increments, closing braces) are missing from
 * this truncated view. */
234 static inline bool apply_lookup (hb_apply_context_t *c,
235 unsigned int count, /* Including the first glyph */
236 unsigned int lookupCount,
237 const LookupRecord lookupRecord[], /* Array of LookupRecords--in design order */
238 apply_lookup_func_t apply_func)
240 unsigned int end = MIN (c->buffer->len, c->buffer->idx + c->context_length);
241 if (unlikely (count == 0 || c->buffer->idx + count > end))
244 /* TODO We don't support lookupRecord arrays that are not increasing:
245 * Should be easy for in_place ones at least. */
247 /* Note: If sublookup is reverse, it will underflow after the first loop
248 * and we jump out of it. Not entirely disastrous. So we don't check
249 * for reverse lookup here.
251 for (unsigned int i = 0; i < count; /* NOP */)
253 while (_hb_ot_layout_skip_mark (c->face, &c->buffer->info[c->buffer->idx], c->lookup_props, NULL))
255 if (unlikely (c->buffer->idx == end))
257 /* No lookup applied for this index */
258 c->buffer->next_glyph ();
/* A record targets this sequence position: recurse into its lookup. */
261 if (lookupCount && i == lookupRecord->sequenceIndex)
263 unsigned int old_pos = c->buffer->idx;
266 bool done = apply_func (c, lookupRecord->lookupListIndex);
270 /* Err, this is wrong if the lookup jumped over some glyphs */
271 i += c->buffer->idx - old_pos;
272 if (unlikely (c->buffer->idx == end))
281 /* No lookup applied for this index */
282 c->buffer->next_glyph ();
293 /* Contextual lookups */
/* Bundles the per-format match/apply function pair with the opaque data the
 * matcher needs (NULL, a ClassDef pointer, or a coverage base).
 * NOTE(review): the `funcs` member line is not visible in this view. */
295 struct ContextLookupContext
298 const void *match_data;
/* Shared driver for all Context formats: match the input sequence, then
 * (on a copied context whose context_length was narrowed to the matched
 * span) apply the nested lookups. */
301 static inline bool context_lookup (hb_apply_context_t *c,
302 unsigned int inputCount, /* Including the first glyph (not matched) */
303 const USHORT input[], /* Array of input values--start with second glyph */
304 unsigned int lookupCount,
305 const LookupRecord lookupRecord[],
306 ContextLookupContext &lookup_context)
308 hb_apply_context_t new_context = *c;
309 return match_input (c,
311 lookup_context.funcs.match, lookup_context.match_data,
312 &new_context.context_length)
313 && apply_lookup (&new_context,
315 lookupCount, lookupRecord,
316 lookup_context.funcs.apply);
/* One context rule: an input sequence (first glyph implied by coverage/class)
 * followed by its LookupRecords.  NOTE(review): the `struct Rule` header and
 * some member/brace lines are not visible in this truncated view. */
321 friend struct RuleSet;
324 inline bool apply (hb_apply_context_t *c, ContextLookupContext &lookup_context) const
/* The LookupRecord array sits immediately after the variable-length input
 * array; locate it by byte offset. */
327 const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (input, input[0].static_size * (inputCount ? inputCount - 1 : 0));
328 return context_lookup (c,
330 lookupCount, lookupRecord,
335 inline bool sanitize (hb_sanitize_context_t *c) {
337 return inputCount.sanitize (c)
338 && lookupCount.sanitize (c)
339 && c->check_range (input,
340 input[0].static_size * inputCount
341 + lookupRecordX[0].static_size * lookupCount);
345 USHORT inputCount; /* Total number of glyphs in input
346 * glyph sequence--includes the first
348 USHORT lookupCount; /* Number of LookupRecords */
349 USHORT input[VAR]; /* Array of match inputs--start with
351 LookupRecord lookupRecordX[VAR]; /* Array of LookupRecords--in
354 DEFINE_SIZE_ARRAY2 (4, input, lookupRecordX);
/* RuleSet: tries each Rule in preference order and succeeds on the first one
 * that applies.  NOTE(review): the `struct RuleSet` header is not visible in
 * this truncated view. */
359 inline bool apply (hb_apply_context_t *c, ContextLookupContext &lookup_context) const
362 unsigned int num_rules = rule.len;
363 for (unsigned int i = 0; i < num_rules; i++)
365 if ((this+rule[i]).apply (c, lookup_context))
372 inline bool sanitize (hb_sanitize_context_t *c) {
374 return rule.sanitize (c, this);
379 rule; /* Array of Rule tables
380 * ordered by preference */
382 DEFINE_SIZE_ARRAY (2, rule);
/* Context subtable format 1: rules keyed by the coverage index of the first
 * glyph; match values are literal glyph ids. */
386 struct ContextFormat1
388 friend struct Context;
391 inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
394 unsigned int index = (this+coverage) (c->buffer->info[c->buffer->idx].codepoint);
395 if (likely (index == NOT_COVERED))
398 const RuleSet &rule_set = this+ruleSet[index];
399 struct ContextLookupContext lookup_context = {
400 {match_glyph, apply_func},
403 return rule_set.apply (c, lookup_context);
406 inline bool sanitize (hb_sanitize_context_t *c) {
408 return coverage.sanitize (c, this)
409 && ruleSet.sanitize (c, this);
413 USHORT format; /* Format identifier--format = 1 */
415 coverage; /* Offset to Coverage table--from
416 * beginning of table */
417 OffsetArrayOf<RuleSet>
418 ruleSet; /* Array of RuleSet tables
419 * ordered by Coverage Index */
421 DEFINE_SIZE_ARRAY (6, ruleSet);
/* Context subtable format 2: the first glyph is gated by coverage, then its
 * ClassDef class selects the RuleSet; match values are class values. */
425 struct ContextFormat2
427 friend struct Context;
430 inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
433 unsigned int index = (this+coverage) (c->buffer->info[c->buffer->idx].codepoint);
434 if (likely (index == NOT_COVERED))
437 const ClassDef &class_def = this+classDef;
/* Re-key by glyph class rather than coverage index. */
438 index = class_def (c->buffer->info[c->buffer->idx].codepoint);
439 const RuleSet &rule_set = this+ruleSet[index];
440 struct ContextLookupContext lookup_context = {
441 {match_class, apply_func},
444 return rule_set.apply (c, lookup_context);
447 inline bool sanitize (hb_sanitize_context_t *c) {
449 return coverage.sanitize (c, this)
450 && classDef.sanitize (c, this)
451 && ruleSet.sanitize (c, this);
455 USHORT format; /* Format identifier--format = 2 */
457 coverage; /* Offset to Coverage table--from
458 * beginning of table */
460 classDef; /* Offset to glyph ClassDef table--from
461 * beginning of table */
462 OffsetArrayOf<RuleSet>
463 ruleSet; /* Array of RuleSet tables
464 * ordered by class */
466 DEFINE_SIZE_ARRAY (8, ruleSet);
/* Context subtable format 3: one inline rule where every input position has
 * its own Coverage table; match values are coverage offsets. */
470 struct ContextFormat3
472 friend struct Context;
475 inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
478 unsigned int index = (this+coverage[0]) (c->buffer->info[c->buffer->idx].codepoint);
479 if (likely (index == NOT_COVERED))
/* LookupRecords follow the variable-length coverage-offset array. */
482 const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverage, coverage[0].static_size * glyphCount);
483 struct ContextLookupContext lookup_context = {
484 {match_coverage, apply_func},
487 return context_lookup (c,
488 glyphCount, (const USHORT *) (coverage + 1),
489 lookupCount, lookupRecord,
493 inline bool sanitize (hb_sanitize_context_t *c) {
495 if (!c->check_struct (this)) return false;
496 unsigned int count = glyphCount;
497 if (!c->check_array (coverage, coverage[0].static_size, count)) return false;
498 for (unsigned int i = 0; i < count; i++)
499 if (!coverage[i].sanitize (c, this)) return false;
500 LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverage, coverage[0].static_size * count);
501 return c->check_array (lookupRecord, lookupRecord[0].static_size, lookupCount);
505 USHORT format; /* Format identifier--format = 3 */
506 USHORT glyphCount; /* Number of glyphs in the input glyph
508 USHORT lookupCount; /* Number of LookupRecords */
510 coverage[VAR]; /* Array of offsets to Coverage
511 * table in glyph sequence order */
512 LookupRecord lookupRecordX[VAR]; /* Array of LookupRecords--in
515 DEFINE_SIZE_ARRAY2 (6, coverage, lookupRecordX);
/* Context subtable dispatcher: forwards apply/sanitize to the concrete
 * format based on the leading USHORT.  NOTE(review): the `struct Context`
 * header, switch statements, and union wrapper lines are not visible in
 * this truncated view. */
521 inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
525 case 1: return u.format1.apply (c, apply_func);
526 case 2: return u.format2.apply (c, apply_func);
527 case 3: return u.format3.apply (c, apply_func);
528 default:return false;
532 inline bool sanitize (hb_sanitize_context_t *c) {
534 if (!u.format.sanitize (c)) return false;
536 case 1: return u.format1.sanitize (c);
537 case 2: return u.format2.sanitize (c);
538 case 3: return u.format3.sanitize (c);
545 USHORT format; /* Format identifier */
546 ContextFormat1 format1;
547 ContextFormat2 format2;
548 ContextFormat3 format3;
553 /* Chaining Contextual lookups */
/* Like ContextLookupContext but with three data slots — backtrack, input,
 * lookahead — since each sequence may use distinct matcher data.
 * NOTE(review): the `funcs` member line is not visible in this view. */
555 struct ChainContextLookupContext
558 const void *match_data[3];
/* Shared driver for all ChainContext formats: match backtrack, input, and
 * lookahead sequences (each with its own match_data slot), then apply the
 * nested lookups on a context narrowed to the matched input span.
 * NOTE(review): the match_input call line is missing from this view. */
561 static inline bool chain_context_lookup (hb_apply_context_t *c,
562 unsigned int backtrackCount,
563 const USHORT backtrack[],
564 unsigned int inputCount, /* Including the first glyph (not matched) */
565 const USHORT input[], /* Array of input values--start with second glyph */
566 unsigned int lookaheadCount,
567 const USHORT lookahead[],
568 unsigned int lookupCount,
569 const LookupRecord lookupRecord[],
570 ChainContextLookupContext &lookup_context)
/* Cheap rejection before doing any per-glyph matching. */
573 if (unlikely (c->buffer->backtrack_len () < backtrackCount ||
574 c->buffer->idx + inputCount + lookaheadCount > c->buffer->len ||
575 inputCount + lookaheadCount > c->context_length))
578 hb_apply_context_t new_context = *c;
579 return match_backtrack (c,
580 backtrackCount, backtrack,
581 lookup_context.funcs.match, lookup_context.match_data[0])
584 lookup_context.funcs.match, lookup_context.match_data[1],
585 &new_context.context_length)
586 && match_lookahead (c,
587 lookaheadCount, lookahead,
588 lookup_context.funcs.match, lookup_context.match_data[2],
589 new_context.context_length)
590 && apply_lookup (&new_context,
592 lookupCount, lookupRecord,
593 lookup_context.funcs.apply);
/* One chaining rule: four back-to-back variable-length arrays (backtrack,
 * headless input, lookahead, LookupRecords), each located with StructAfter.
 * NOTE(review): the `struct ChainRule` header and some member lines are not
 * visible in this truncated view. */
598 friend struct ChainRuleSet;
601 inline bool apply (hb_apply_context_t *c, ChainContextLookupContext &lookup_context) const
604 const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
605 const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
606 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
607 return chain_context_lookup (c,
608 backtrack.len, backtrack.array,
609 input.len, input.array,
610 lookahead.len, lookahead.array,
611 lookup.len, lookup.array,
/* Sanitize each trailing array in order; each one's extent depends on the
 * previous array having already been validated. */
616 inline bool sanitize (hb_sanitize_context_t *c) {
618 if (!backtrack.sanitize (c)) return false;
619 HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
620 if (!input.sanitize (c)) return false;
621 ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
622 if (!lookahead.sanitize (c)) return false;
623 ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
624 return lookup.sanitize (c);
629 backtrack; /* Array of backtracking values
630 * (to be matched before the input
632 HeadlessArrayOf<USHORT>
633 inputX; /* Array of input values (start with
636 lookaheadX; /* Array of lookahead values's (to be
637 * matched after the input sequence) */
638 ArrayOf<LookupRecord>
639 lookupX; /* Array of LookupRecords--in
/* ChainRuleSet: tries each ChainRule in preference order, succeeding on the
 * first that applies.  NOTE(review): the `struct ChainRuleSet` header is not
 * visible in this truncated view. */
647 inline bool apply (hb_apply_context_t *c, ChainContextLookupContext &lookup_context) const
650 unsigned int num_rules = rule.len;
651 for (unsigned int i = 0; i < num_rules; i++)
653 if ((this+rule[i]).apply (c, lookup_context))
660 inline bool sanitize (hb_sanitize_context_t *c) {
662 return rule.sanitize (c, this);
666 OffsetArrayOf<ChainRule>
667 rule; /* Array of ChainRule tables
668 * ordered by preference */
670 DEFINE_SIZE_ARRAY (2, rule);
/* ChainContext subtable format 1: rule sets keyed by coverage index of the
 * first glyph; match values are literal glyph ids (no per-sequence data). */
673 struct ChainContextFormat1
675 friend struct ChainContext;
678 inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
681 unsigned int index = (this+coverage) (c->buffer->info[c->buffer->idx].codepoint);
682 if (likely (index == NOT_COVERED))
685 const ChainRuleSet &rule_set = this+ruleSet[index];
686 struct ChainContextLookupContext lookup_context = {
687 {match_glyph, apply_func},
690 return rule_set.apply (c, lookup_context);
693 inline bool sanitize (hb_sanitize_context_t *c) {
695 return coverage.sanitize (c, this)
696 && ruleSet.sanitize (c, this);
700 USHORT format; /* Format identifier--format = 1 */
702 coverage; /* Offset to Coverage table--from
703 * beginning of table */
704 OffsetArrayOf<ChainRuleSet>
705 ruleSet; /* Array of ChainRuleSet tables
706 * ordered by Coverage Index */
708 DEFINE_SIZE_ARRAY (6, ruleSet);
/* ChainContext subtable format 2: class-based chaining; separate ClassDefs
 * classify the backtrack, input, and lookahead sequences, and the input class
 * of the first glyph selects the ChainRuleSet. */
711 struct ChainContextFormat2
713 friend struct ChainContext;
716 inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
719 unsigned int index = (this+coverage) (c->buffer->info[c->buffer->idx].codepoint);
720 if (likely (index == NOT_COVERED))
723 const ClassDef &backtrack_class_def = this+backtrackClassDef;
724 const ClassDef &input_class_def = this+inputClassDef;
725 const ClassDef &lookahead_class_def = this+lookaheadClassDef;
/* Re-key by the glyph's input class rather than coverage index. */
727 index = input_class_def (c->buffer->info[c->buffer->idx].codepoint);
728 const ChainRuleSet &rule_set = this+ruleSet[index];
/* Each sequence gets its own ClassDef as matcher data. */
729 struct ChainContextLookupContext lookup_context = {
730 {match_class, apply_func},
731 {&backtrack_class_def,
733 &lookahead_class_def}
735 return rule_set.apply (c, lookup_context);
738 inline bool sanitize (hb_sanitize_context_t *c) {
740 return coverage.sanitize (c, this)
741 && backtrackClassDef.sanitize (c, this)
742 && inputClassDef.sanitize (c, this)
743 && lookaheadClassDef.sanitize (c, this)
744 && ruleSet.sanitize (c, this);
748 USHORT format; /* Format identifier--format = 2 */
750 coverage; /* Offset to Coverage table--from
751 * beginning of table */
753 backtrackClassDef; /* Offset to glyph ClassDef table
754 * containing backtrack sequence
755 * data--from beginning of table */
757 inputClassDef; /* Offset to glyph ClassDef
758 * table containing input sequence
759 * data--from beginning of table */
761 lookaheadClassDef; /* Offset to glyph ClassDef table
762 * containing lookahead sequence
763 * data--from beginning of table */
764 OffsetArrayOf<ChainRuleSet>
765 ruleSet; /* Array of ChainRuleSet tables
766 * ordered by class */
768 DEFINE_SIZE_ARRAY (12, ruleSet);
/* ChainContext subtable format 3: one inline rule with per-position Coverage
 * tables for the backtrack, input, and lookahead sequences (located
 * back-to-back with StructAfter); match values are coverage offsets relative
 * to this table. */
771 struct ChainContextFormat3
773 friend struct ChainContext;
777 inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
780 const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
/* Gate on the first input coverage table. */
782 unsigned int index = (this+input[0]) (c->buffer->info[c->buffer->idx].codepoint);
783 if (likely (index == NOT_COVERED))
786 const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
787 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
788 struct ChainContextLookupContext lookup_context = {
789 {match_coverage, apply_func},
792 return chain_context_lookup (c,
793 backtrack.len, (const USHORT *) backtrack.array,
794 input.len, (const USHORT *) input.array + 1,
795 lookahead.len, (const USHORT *) lookahead.array,
796 lookup.len, lookup.array,
/* Sanitize the four trailing arrays in order, as in ChainRule. */
800 inline bool sanitize (hb_sanitize_context_t *c) {
802 if (!backtrack.sanitize (c, this)) return false;
803 OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
804 if (!input.sanitize (c, this)) return false;
805 OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
806 if (!lookahead.sanitize (c, this)) return false;
807 ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
808 return lookup.sanitize (c);
812 USHORT format; /* Format identifier--format = 3 */
813 OffsetArrayOf<Coverage>
814 backtrack; /* Array of coverage tables
815 * in backtracking sequence, in glyph
817 OffsetArrayOf<Coverage>
818 inputX ; /* Array of coverage
819 * tables in input sequence, in glyph
821 OffsetArrayOf<Coverage>
822 lookaheadX; /* Array of coverage tables
823 * in lookahead sequence, in glyph
825 ArrayOf<LookupRecord>
826 lookupX; /* Array of LookupRecords--in
829 DEFINE_SIZE_MIN (10);
/* ChainContext subtable dispatcher: forwards apply/sanitize to the concrete
 * format based on the leading USHORT.  NOTE(review): the `struct ChainContext`
 * header, switch statements, and union wrapper lines are not visible in this
 * truncated view. */
835 inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
839 case 1: return u.format1.apply (c, apply_func);
840 case 2: return u.format2.apply (c, apply_func);
841 case 3: return u.format3.apply (c, apply_func);
842 default:return false;
846 inline bool sanitize (hb_sanitize_context_t *c) {
848 if (!u.format.sanitize (c)) return false;
850 case 1: return u.format1.sanitize (c);
851 case 2: return u.format2.sanitize (c);
852 case 3: return u.format3.sanitize (c);
859 USHORT format; /* Format identifier */
860 ChainContextFormat1 format1;
861 ChainContextFormat2 format2;
862 ChainContextFormat3 format3;
/* Extension subtable format 1: indirection record carrying the real lookup
 * type and a 32-bit offset to the actual subtable (lets lookups live beyond
 * 16-bit offset range). */
867 struct ExtensionFormat1
869 friend struct Extension;
872 inline unsigned int get_type (void) const { return extensionLookupType; }
873 inline unsigned int get_offset (void) const { return extensionOffset; }
875 inline bool sanitize (hb_sanitize_context_t *c) {
877 return c->check_struct (this);
881 USHORT format; /* Format identifier. Set to 1. */
882 USHORT extensionLookupType; /* Lookup type of subtable referenced
883 * by ExtensionOffset (i.e. the
884 * extension subtable). */
885 ULONG extensionOffset; /* Offset to the extension subtable,
886 * of lookup type subtable. */
888 DEFINE_SIZE_STATIC (8);
/* Extension dispatcher: forwards to format 1 (the only defined format).
 * NOTE(review): the `struct Extension` header, switch statements, default
 * cases, and union wrapper lines are not visible in this truncated view. */
893 inline unsigned int get_type (void) const
896 case 1: return u.format1.get_type ();
900 inline unsigned int get_offset (void) const
903 case 1: return u.format1.get_offset ();
908 inline bool sanitize (hb_sanitize_context_t *c) {
910 if (!u.format.sanitize (c)) return false;
912 case 1: return u.format1.sanitize (c);
919 USHORT format; /* Format identifier */
920 ExtensionFormat1 format1;
/* Common top-level layout of the GSUB and GPOS tables: version header plus
 * offsets to the ScriptList, FeatureList, and LookupList, with accessor
 * wrappers that forward to those sub-tables.  NOTE(review): the enclosing
 * struct header line is not visible in this truncated view. */
931 static const hb_tag_t GSUBTag = HB_OT_TAG_GSUB;
932 static const hb_tag_t GPOSTag = HB_OT_TAG_GPOS;
/* Script accessors — all delegate to the ScriptList sub-table. */
934 inline unsigned int get_script_count (void) const
935 { return (this+scriptList).len; }
936 inline const Tag& get_script_tag (unsigned int i) const
937 { return (this+scriptList).get_tag (i); }
938 inline unsigned int get_script_tags (unsigned int start_offset,
939 unsigned int *script_count /* IN/OUT */,
940 hb_tag_t *script_tags /* OUT */) const
941 { return (this+scriptList).get_tags (start_offset, script_count, script_tags); }
942 inline const Script& get_script (unsigned int i) const
943 { return (this+scriptList)[i]; }
944 inline bool find_script_index (hb_tag_t tag, unsigned int *index) const
945 { return (this+scriptList).find_index (tag, index); }
/* Feature accessors — all delegate to the FeatureList sub-table. */
947 inline unsigned int get_feature_count (void) const
948 { return (this+featureList).len; }
949 inline const Tag& get_feature_tag (unsigned int i) const
950 { return (this+featureList).get_tag (i); }
951 inline unsigned int get_feature_tags (unsigned int start_offset,
952 unsigned int *feature_count /* IN/OUT */,
953 hb_tag_t *feature_tags /* OUT */) const
954 { return (this+featureList).get_tags (start_offset, feature_count, feature_tags); }
955 inline const Feature& get_feature (unsigned int i) const
956 { return (this+featureList)[i]; }
957 inline bool find_feature_index (hb_tag_t tag, unsigned int *index) const
958 { return (this+featureList).find_index (tag, index); }
/* Lookup accessors — delegate to the LookupList sub-table. */
960 inline unsigned int get_lookup_count (void) const
961 { return (this+lookupList).len; }
962 inline const Lookup& get_lookup (unsigned int i) const
963 { return (this+lookupList)[i]; }
/* Only major version 1 is understood; reject anything else early. */
965 inline bool sanitize (hb_sanitize_context_t *c) {
967 return version.sanitize (c) && likely (version.major == 1)
968 && scriptList.sanitize (c, this)
969 && featureList.sanitize (c, this)
970 && lookupList.sanitize (c, this);
974 FixedVersion version; /* Version of the GSUB/GPOS table--initially set
977 scriptList; /* ScriptList table */
978 OffsetTo<FeatureList>
979 featureList; /* FeatureList table */
981 lookupList; /* LookupList table */
983 DEFINE_SIZE_STATIC (10);
989 #endif /* HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH */