2 * Copyright © 2007,2008,2009,2010 Red Hat, Inc.
3 * Copyright © 2010,2012 Google, Inc.
5 * This is part of HarfBuzz, a text shaping library.
7 * Permission is hereby granted, without written agreement and without
8 * license or royalty fees, to use, copy, modify, and distribute this
9 * software and its documentation for any purpose, provided that the
10 * above copyright notice and the following two paragraphs appear in
11 * all copies of this software.
13 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
14 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
15 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
16 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
19 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
20 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
21 * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
22 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
23 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
25 * Red Hat Author(s): Behdad Esfahbod
26 * Google Author(s): Behdad Esfahbod
29 #ifndef HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH
30 #define HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH
32 #include "hb-buffer-private.hh"
33 #include "hb-ot-layout-gdef-table.hh"
37 /* unique ligature id */
38 /* component number in the ligature (0 = base) */
40 set_lig_props (hb_glyph_info_t &info, unsigned int lig_id, unsigned int lig_comp)
42 info.lig_props() = (lig_id << 4) | (lig_comp & 0x0F);
44 static inline unsigned int
45 get_lig_id (hb_glyph_info_t &info)
47 return info.lig_props() >> 4;
49 static inline unsigned int
50 get_lig_comp (hb_glyph_info_t &info)
52 return info.lig_props() & 0x0F;
55 static inline uint8_t allocate_lig_id (hb_buffer_t *buffer) {
56 uint8_t lig_id = buffer->next_serial () & 0x0F;
57 if (unlikely (!lig_id))
58 lig_id = allocate_lig_id (buffer); /* in case of overflow */
#ifndef HB_DEBUG_CLOSURE
#define HB_DEBUG_CLOSURE (HB_DEBUG+0)
#endif

/* Emit a scoped trace entry for closure() methods when closure
 * debugging is compiled in. */
#define TRACE_CLOSURE() \
	hb_auto_trace_t<HB_DEBUG_CLOSURE> trace (&c->debug_depth, "CLOSURE", this, HB_FUNC, "");


/* TODO Add TRACE_RETURN annotation for would_apply */
75 struct hb_closure_context_t
79 unsigned int nesting_level_left;
80 unsigned int debug_depth;
83 hb_closure_context_t (hb_face_t *face_,
85 unsigned int nesting_level_left_ = MAX_NESTING_LEVEL) :
86 face (face_), glyphs (glyphs_),
87 nesting_level_left (nesting_level_left_),
#ifndef HB_DEBUG_APPLY
#define HB_DEBUG_APPLY (HB_DEBUG+0)
#endif

/* Emit a scoped trace entry for apply() methods.  The format string has
 * two conversions, so both the buffer index and the current codepoint
 * must be passed (the index argument had been lost). */
#define TRACE_APPLY() \
	hb_auto_trace_t<HB_DEBUG_APPLY> trace (&c->debug_depth, "APPLY", this, HB_FUNC, "idx %d codepoint %u", c->buffer->idx, c->buffer->cur().codepoint);
/* Context threaded through lookup application: wraps the font/face/buffer
 * plus per-lookup state (mask, props), and provides mark-skipping
 * iterators over the buffer.
 * NOTE(review): this listing is missing several physical lines (braces,
 * member declarations such as font/face/buffer, loop bodies) dropped by
 * extraction; surviving tokens are kept verbatim. */
102 struct hb_apply_context_t
107 hb_direction_t direction;
108 hb_mask_t lookup_mask;
109 unsigned int nesting_level_left;
110 unsigned int lookup_props;
/* property of first glyph */
111 unsigned int property;
112 unsigned int debug_depth;
115 hb_apply_context_t (hb_font_t *font_,
117 hb_buffer_t *buffer_,
118 hb_mask_t lookup_mask_) :
119 font (font_), face (face_), buffer (buffer_),
120 direction (buffer_->props.direction),
121 lookup_mask (lookup_mask_),
122 nesting_level_left (MAX_NESTING_LEVEL),
123 lookup_props (0), property (0), debug_depth (0) {}
/* Cache the lookup flags of the lookup about to be applied. */
125 void set_lookup (const Lookup &l) {
126 lookup_props = l.get_props ();
/* Iterates forward over buffer->info, skipping glyphs that
 * _hb_ot_layout_skip_mark says to ignore for the current lookup. */
129 struct mark_skipping_forward_iterator_t
131 inline mark_skipping_forward_iterator_t (hb_apply_context_t *c_,
132 unsigned int start_index_,
133 unsigned int num_items_,
134 bool context_match = false)
138 num_items = num_items_;
/* context_match: ignore mask/syllable filtering (match everything). */
139 mask = context_match ? -1 : c->lookup_mask;
140 syllable = context_match ? 0 : c->buffer->cur().syllable ();
141 end = c->buffer->len;
143 inline bool has_no_chance (void) const
/* True when fewer than num_items glyphs remain ahead of idx. */
145 return unlikely (num_items && idx + num_items >= end);
147 inline bool next (unsigned int *property_out,
148 unsigned int lookup_props)
150 assert (num_items > 0);
153 if (has_no_chance ())
156 } while (_hb_ot_layout_skip_mark (c->face, &c->buffer->info[idx], lookup_props, property_out));
/* Glyph is accepted only if its mask intersects and (when syllable
 * matching is on) its syllable matches the starting glyph's. */
158 return (c->buffer->info[idx].mask & mask) && (!syllable || syllable == c->buffer->info[idx].syllable ());
160 inline bool next (unsigned int *property_out = NULL)
162 return next (property_out, c->lookup_props);
167 hb_apply_context_t *c;
168 unsigned int num_items;
/* Backward twin of the iterator above, walking buffer->out_info. */
174 struct mark_skipping_backward_iterator_t
176 inline mark_skipping_backward_iterator_t (hb_apply_context_t *c_,
177 unsigned int start_index_,
178 unsigned int num_items_,
180 bool match_syllable_ = true)
184 num_items = num_items_;
185 mask = mask_ ? mask_ : c->lookup_mask;
186 syllable = match_syllable_ ? c->buffer->cur().syllable () : 0;
188 inline bool has_no_chance (void) const
190 return unlikely (idx < num_items);
192 inline bool prev (unsigned int *property_out,
193 unsigned int lookup_props)
195 assert (num_items > 0);
198 if (has_no_chance ())
201 } while (_hb_ot_layout_skip_mark (c->face, &c->buffer->out_info[idx], lookup_props, property_out));
203 return (c->buffer->out_info[idx].mask & mask) && (!syllable || syllable == c->buffer->out_info[idx].syllable ());
205 inline bool prev (unsigned int *property_out = NULL)
207 return prev (property_out, c->lookup_props);
212 hb_apply_context_t *c;
213 unsigned int num_items;
/* Should the glyph at the current buffer position be skipped
 * for this lookup (per its lookup flags)? */
218 inline bool should_mark_skip_current_glyph (void) const
220 return _hb_ot_layout_skip_mark (face, &buffer->cur(), lookup_props, NULL);
/* Replace current glyph, caching its GDEF class. */
225 inline void replace_glyph (hb_codepoint_t glyph_index,
226 unsigned int klass = 0) const
228 buffer->cur().props_cache() = klass; /*XXX if has gdef? */
229 buffer->replace_glyph (glyph_index);
/* Replace num_in glyphs with num_out glyphs given as big-endian 16-bit ids. */
231 inline void replace_glyphs_be16 (unsigned int num_in,
232 unsigned int num_out,
233 const uint16_t *glyph_data_be,
234 unsigned int klass = 0) const
236 buffer->cur().props_cache() = klass; /* XXX if has gdef? */
237 buffer->replace_glyphs_be16 (num_in, num_out, glyph_data_be);
243 typedef bool (*intersects_func_t) (hb_set_t *glyphs, const USHORT &value, const void *data);
244 typedef bool (*match_func_t) (hb_codepoint_t glyph_id, const USHORT &value, const void *data);
245 typedef void (*closure_lookup_func_t) (hb_closure_context_t *c, unsigned int lookup_index);
246 typedef bool (*apply_lookup_func_t) (hb_apply_context_t *c, unsigned int lookup_index);
248 struct ContextClosureFuncs
250 intersects_func_t intersects;
251 closure_lookup_func_t closure;
253 struct ContextApplyFuncs
256 apply_lookup_func_t apply;
259 static inline bool intersects_glyph (hb_set_t *glyphs, const USHORT &value, const void *data HB_UNUSED)
261 return glyphs->has (value);
263 static inline bool intersects_class (hb_set_t *glyphs, const USHORT &value, const void *data)
265 const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
266 return class_def.intersects_class (glyphs, value);
268 static inline bool intersects_coverage (hb_set_t *glyphs, const USHORT &value, const void *data)
270 const OffsetTo<Coverage> &coverage = (const OffsetTo<Coverage>&)value;
271 return (data+coverage).intersects (glyphs);
274 static inline bool intersects_array (hb_closure_context_t *c,
276 const USHORT values[],
277 intersects_func_t intersects_func,
278 const void *intersects_data)
280 for (unsigned int i = 0; i < count; i++)
281 if (likely (!intersects_func (c->glyphs, values[i], intersects_data)))
287 static inline bool match_glyph (hb_codepoint_t glyph_id, const USHORT &value, const void *data HB_UNUSED)
289 return glyph_id == value;
292 static inline bool match_class (hb_codepoint_t glyph_id, const USHORT &value, const void *data)
294 const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
295 return class_def.get_class (glyph_id) == value;
298 static inline bool match_coverage (hb_codepoint_t glyph_id, const USHORT &value, const void *data)
300 const OffsetTo<Coverage> &coverage = (const OffsetTo<Coverage>&)value;
301 return (data+coverage).get_coverage (glyph_id) != NOT_COVERED;
305 static inline bool match_input (hb_apply_context_t *c,
306 unsigned int count, /* Including the first glyph (not matched) */
307 const USHORT input[], /* Array of input values--start with second glyph */
308 match_func_t match_func,
309 const void *match_data,
310 unsigned int *end_offset = NULL)
312 hb_apply_context_t::mark_skipping_forward_iterator_t skippy_iter (c, c->buffer->idx, count - 1);
313 if (skippy_iter.has_no_chance ())
316 for (unsigned int i = 1; i < count; i++)
318 if (!skippy_iter.next ())
321 if (likely (!match_func (c->buffer->info[skippy_iter.idx].codepoint, input[i - 1], match_data)))
326 *end_offset = skippy_iter.idx - c->buffer->idx + 1;
331 static inline bool match_backtrack (hb_apply_context_t *c,
333 const USHORT backtrack[],
334 match_func_t match_func,
335 const void *match_data)
337 hb_apply_context_t::mark_skipping_backward_iterator_t skippy_iter (c, c->buffer->backtrack_len (), count, true);
338 if (skippy_iter.has_no_chance ())
341 for (unsigned int i = 0; i < count; i++)
343 if (!skippy_iter.prev ())
346 if (likely (!match_func (c->buffer->out_info[skippy_iter.idx].codepoint, backtrack[i], match_data)))
353 static inline bool match_lookahead (hb_apply_context_t *c,
355 const USHORT lookahead[],
356 match_func_t match_func,
357 const void *match_data,
360 hb_apply_context_t::mark_skipping_forward_iterator_t skippy_iter (c, c->buffer->idx + offset - 1, count, true);
361 if (skippy_iter.has_no_chance ())
364 for (unsigned int i = 0; i < count; i++)
366 if (!skippy_iter.next ())
369 if (likely (!match_func (c->buffer->info[skippy_iter.idx].codepoint, lookahead[i], match_data)))
380 inline bool sanitize (hb_sanitize_context_t *c) {
382 return TRACE_RETURN (c->check_struct (this));
385 USHORT sequenceIndex; /* Index into current glyph
386 * sequence--first glyph = 0 */
387 USHORT lookupListIndex; /* Lookup to apply to that
388 * position--zero--based */
390 DEFINE_SIZE_STATIC (4);
394 static inline void closure_lookup (hb_closure_context_t *c,
395 unsigned int lookupCount,
396 const LookupRecord lookupRecord[], /* Array of LookupRecords--in design order */
397 closure_lookup_func_t closure_func)
399 for (unsigned int i = 0; i < lookupCount; i++)
400 closure_func (c, lookupRecord->lookupListIndex);
/* Walk the matched span of `count` glyphs, applying the lookup from each
 * LookupRecord when its sequenceIndex is reached, and copying untouched
 * glyphs through.  Positions are tracked in buffer-index space, so a
 * sublookup that grows/shrinks the buffer shifts `i` accordingly.
 * NOTE(review): extraction dropped several physical lines here (the
 * early-return, the record-array advance, loop braces); surviving tokens
 * are kept verbatim. */
403 static inline bool apply_lookup (hb_apply_context_t *c,
404 unsigned int count, /* Including the first glyph */
405 unsigned int lookupCount,
406 const LookupRecord lookupRecord[], /* Array of LookupRecords--in design order */
407 apply_lookup_func_t apply_func)
409 unsigned int end = c->buffer->len;
410 if (unlikely (count == 0 || c->buffer->idx + count > end))
413 /* TODO We don't support lookupRecord arrays that are not increasing:
414 * Should be easy for in_place ones at least. */
416 /* Note: If sublookup is reverse, it will underflow after the first loop
417 * and we jump out of it. Not entirely disastrous. So we don't check
418 * for reverse lookup here.
420 for (unsigned int i = 0; i < count; /* NOP */)
422 if (unlikely (c->buffer->idx == end))
/* Copy through any glyphs this lookup is flagged to skip. */
424 while (c->should_mark_skip_current_glyph ())
426 /* No lookup applied for this index */
427 c->buffer->next_glyph ();
428 if (unlikely (c->buffer->idx == end))
/* Is a lookup scheduled at this sequence position? */
432 if (lookupCount && i == lookupRecord->sequenceIndex)
434 unsigned int old_pos = c->buffer->idx;
437 bool done = apply_func (c, lookupRecord->lookupListIndex);
441 /* Err, this is wrong if the lookup jumped over some glyphs */
442 i += c->buffer->idx - old_pos;
443 if (unlikely (c->buffer->idx == end))
452 /* No lookup applied for this index */
453 c->buffer->next_glyph ();
463 /* Contextual lookups */
465 struct ContextClosureLookupContext
467 ContextClosureFuncs funcs;
468 const void *intersects_data;
471 struct ContextApplyLookupContext
473 ContextApplyFuncs funcs;
474 const void *match_data;
477 static inline void context_closure_lookup (hb_closure_context_t *c,
478 unsigned int inputCount, /* Including the first glyph (not matched) */
479 const USHORT input[], /* Array of input values--start with second glyph */
480 unsigned int lookupCount,
481 const LookupRecord lookupRecord[],
482 ContextClosureLookupContext &lookup_context)
484 if (intersects_array (c,
485 inputCount ? inputCount - 1 : 0, input,
486 lookup_context.funcs.intersects, lookup_context.intersects_data))
488 lookupCount, lookupRecord,
489 lookup_context.funcs.closure);
493 static inline bool context_apply_lookup (hb_apply_context_t *c,
494 unsigned int inputCount, /* Including the first glyph (not matched) */
495 const USHORT input[], /* Array of input values--start with second glyph */
496 unsigned int lookupCount,
497 const LookupRecord lookupRecord[],
498 ContextApplyLookupContext &lookup_context)
500 return match_input (c,
502 lookup_context.funcs.match, lookup_context.match_data)
505 lookupCount, lookupRecord,
506 lookup_context.funcs.apply);
/* Body of a Context rule (Formats 1/2): an input sequence followed by a
 * trailing LookupRecord array located right after input[inputCount-1].
 * NOTE(review): the struct header and braces were dropped by extraction;
 * surviving tokens kept verbatim. */
511 friend struct RuleSet;
515 inline void closure (hb_closure_context_t *c, ContextClosureLookupContext &lookup_context) const
/* LookupRecords live immediately after the input array. */
518 const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (input, input[0].static_size * (inputCount ? inputCount - 1 : 0));
519 context_closure_lookup (c,
521 lookupCount, lookupRecord,
525 inline bool apply (hb_apply_context_t *c, ContextApplyLookupContext &lookup_context) const
528 const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (input, input[0].static_size * (inputCount ? inputCount - 1 : 0));
529 return TRACE_RETURN (context_apply_lookup (c, inputCount, input, lookupCount, lookupRecord, lookup_context));
533 inline bool sanitize (hb_sanitize_context_t *c) {
535 return inputCount.sanitize (c)
536 && lookupCount.sanitize (c)
537 && c->check_range (input,
538 input[0].static_size * inputCount
539 + lookupRecordX[0].static_size * lookupCount);
543 USHORT inputCount; /* Total number of glyphs in input
544 * glyph sequence--includes the first
546 USHORT lookupCount; /* Number of LookupRecords */
547 USHORT input[VAR]; /* Array of match inputs--start with
549 LookupRecord lookupRecordX[VAR]; /* Array of LookupRecords--in
552 DEFINE_SIZE_ARRAY2 (4, input, lookupRecordX);
/* A set of Rules tried in order; apply() returns on the first match.
 * NOTE(review): struct header/braces dropped by extraction; tokens kept
 * verbatim. */
557 inline void closure (hb_closure_context_t *c, ContextClosureLookupContext &lookup_context) const
560 unsigned int num_rules = rule.len;
561 for (unsigned int i = 0; i < num_rules; i++)
562 (this+rule[i]).closure (c, lookup_context);
565 inline bool apply (hb_apply_context_t *c, ContextApplyLookupContext &lookup_context) const
568 unsigned int num_rules = rule.len;
/* First rule that applies wins. */
569 for (unsigned int i = 0; i < num_rules; i++)
571 if ((this+rule[i]).apply (c, lookup_context))
572 return TRACE_RETURN (true);
574 return TRACE_RETURN (false);
577 inline bool sanitize (hb_sanitize_context_t *c) {
579 return TRACE_RETURN (rule.sanitize (c, this));
584 rule; /* Array of Rule tables
585 * ordered by preference */
587 DEFINE_SIZE_ARRAY (2, rule);
/* Context substitution Format 1: rules keyed by first-glyph coverage,
 * matched against literal glyph ids.
 * NOTE(review): braces and some lines dropped by extraction; tokens kept
 * verbatim. */
591 struct ContextFormat1
593 friend struct Context;
597 inline void closure (hb_closure_context_t *c, closure_lookup_func_t closure_func) const
601 const Coverage &cov = (this+coverage);
603 struct ContextClosureLookupContext lookup_context = {
604 {intersects_glyph, closure_func},
/* Only rule sets whose coverage glyph is in the closure set matter. */
608 unsigned int count = ruleSet.len;
609 for (unsigned int i = 0; i < count; i++)
610 if (cov.intersects_coverage (c->glyphs, i)) {
611 const RuleSet &rule_set = this+ruleSet[i];
612 rule_set.closure (c, lookup_context);
616 inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
619 unsigned int index = (this+coverage) (c->buffer->cur().codepoint);
620 if (likely (index == NOT_COVERED))
621 return TRACE_RETURN (false);
623 const RuleSet &rule_set = this+ruleSet[index];
624 struct ContextApplyLookupContext lookup_context = {
625 {match_glyph, apply_func},
628 return TRACE_RETURN (rule_set.apply (c, lookup_context));
631 inline bool sanitize (hb_sanitize_context_t *c) {
633 return TRACE_RETURN (coverage.sanitize (c, this) && ruleSet.sanitize (c, this));
637 USHORT format; /* Format identifier--format = 1 */
639 coverage; /* Offset to Coverage table--from
640 * beginning of table */
641 OffsetArrayOf<RuleSet>
642 ruleSet; /* Array of RuleSet tables
643 * ordered by Coverage Index */
645 DEFINE_SIZE_ARRAY (6, ruleSet);
/* Context substitution Format 2: rules keyed by the first glyph's class
 * (via a ClassDef), matched against class values.
 * NOTE(review): braces and some lines dropped by extraction; tokens kept
 * verbatim. */
649 struct ContextFormat2
651 friend struct Context;
655 inline void closure (hb_closure_context_t *c, closure_lookup_func_t closure_func) const
658 if (!(this+coverage).intersects (c->glyphs))
661 const ClassDef &class_def = this+classDef;
663 struct ContextClosureLookupContext lookup_context = {
664 {intersects_class, closure_func},
668 unsigned int count = ruleSet.len;
/* Rule set i applies to glyphs of class i. */
669 for (unsigned int i = 0; i < count; i++)
670 if (class_def.intersects_class (c->glyphs, i)) {
671 const RuleSet &rule_set = this+ruleSet[i];
672 rule_set.closure (c, lookup_context);
676 inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
679 unsigned int index = (this+coverage) (c->buffer->cur().codepoint);
680 if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);
682 const ClassDef &class_def = this+classDef;
/* Re-key: rule set selected by current glyph's class, not coverage index. */
683 index = class_def (c->buffer->cur().codepoint);
684 const RuleSet &rule_set = this+ruleSet[index];
685 struct ContextApplyLookupContext lookup_context = {
686 {match_class, apply_func},
689 return TRACE_RETURN (rule_set.apply (c, lookup_context));
692 inline bool sanitize (hb_sanitize_context_t *c) {
694 return TRACE_RETURN (coverage.sanitize (c, this) && classDef.sanitize (c, this) && ruleSet.sanitize (c, this));
698 USHORT format; /* Format identifier--format = 2 */
700 coverage; /* Offset to Coverage table--from
701 * beginning of table */
703 classDef; /* Offset to glyph ClassDef table--from
704 * beginning of table */
705 OffsetArrayOf<RuleSet>
706 ruleSet; /* Array of RuleSet tables
707 * ordered by class */
709 DEFINE_SIZE_ARRAY (8, ruleSet);
/* Context substitution Format 3: a single rule given as one Coverage
 * table per input position, with LookupRecords trailing the offsets.
 * NOTE(review): braces and some lines dropped by extraction; tokens kept
 * verbatim. */
713 struct ContextFormat3
715 friend struct Context;
719 inline void closure (hb_closure_context_t *c, closure_lookup_func_t closure_func) const
722 if (!(this+coverage[0]).intersects (c->glyphs))
/* LookupRecords live right after the glyphCount coverage offsets. */
725 const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverage, coverage[0].static_size * glyphCount);
726 struct ContextClosureLookupContext lookup_context = {
727 {intersects_coverage, closure_func},
730 context_closure_lookup (c,
731 glyphCount, (const USHORT *) (coverage + 1),
732 lookupCount, lookupRecord,
736 inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
739 unsigned int index = (this+coverage[0]) (c->buffer->cur().codepoint);
740 if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);
742 const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverage, coverage[0].static_size * glyphCount);
743 struct ContextApplyLookupContext lookup_context = {
744 {match_coverage, apply_func},
747 return TRACE_RETURN (context_apply_lookup (c, glyphCount, (const USHORT *) (coverage + 1), lookupCount, lookupRecord, lookup_context));
750 inline bool sanitize (hb_sanitize_context_t *c) {
752 if (!c->check_struct (this)) return TRACE_RETURN (false);
753 unsigned int count = glyphCount;
754 if (!c->check_array (coverage, coverage[0].static_size, count)) return TRACE_RETURN (false);
755 for (unsigned int i = 0; i < count; i++)
756 if (!coverage[i].sanitize (c, this)) return TRACE_RETURN (false);
757 LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverage, coverage[0].static_size * count);
758 return TRACE_RETURN (c->check_array (lookupRecord, lookupRecord[0].static_size, lookupCount));
762 USHORT format; /* Format identifier--format = 3 */
763 USHORT glyphCount; /* Number of glyphs in the input glyph
765 USHORT lookupCount; /* Number of LookupRecords */
767 coverage[VAR]; /* Array of offsets to Coverage
768 * table in glyph sequence order */
769 LookupRecord lookupRecordX[VAR]; /* Array of LookupRecords--in
772 DEFINE_SIZE_ARRAY2 (6, coverage, lookupRecordX);
/* Format dispatcher for Context lookups: switches on u.format and
 * forwards to the matching ContextFormatN.
 * NOTE(review): struct/union headers, switch statements and braces were
 * dropped by extraction; surviving tokens kept verbatim. */
779 inline void closure (hb_closure_context_t *c, closure_lookup_func_t closure_func) const
783 case 1: u.format1.closure (c, closure_func); break;
784 case 2: u.format2.closure (c, closure_func); break;
785 case 3: u.format3.closure (c, closure_func); break;
790 inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
794 case 1: return TRACE_RETURN (u.format1.apply (c, apply_func));
795 case 2: return TRACE_RETURN (u.format2.apply (c, apply_func));
796 case 3: return TRACE_RETURN (u.format3.apply (c, apply_func));
797 default:return TRACE_RETURN (false);
801 inline bool sanitize (hb_sanitize_context_t *c) {
/* Format field must be readable before the union arm is trusted. */
803 if (!u.format.sanitize (c)) return TRACE_RETURN (false);
805 case 1: return TRACE_RETURN (u.format1.sanitize (c));
806 case 2: return TRACE_RETURN (u.format2.sanitize (c));
807 case 3: return TRACE_RETURN (u.format3.sanitize (c));
808 default:return TRACE_RETURN (true);
814 USHORT format; /* Format identifier */
815 ContextFormat1 format1;
816 ContextFormat2 format2;
817 ContextFormat3 format3;
822 /* Chaining Contextual lookups */
824 struct ChainContextClosureLookupContext
826 ContextClosureFuncs funcs;
827 const void *intersects_data[3];
830 struct ChainContextApplyLookupContext
832 ContextApplyFuncs funcs;
833 const void *match_data[3];
836 static inline void chain_context_closure_lookup (hb_closure_context_t *c,
837 unsigned int backtrackCount,
838 const USHORT backtrack[],
839 unsigned int inputCount, /* Including the first glyph (not matched) */
840 const USHORT input[], /* Array of input values--start with second glyph */
841 unsigned int lookaheadCount,
842 const USHORT lookahead[],
843 unsigned int lookupCount,
844 const LookupRecord lookupRecord[],
845 ChainContextClosureLookupContext &lookup_context)
847 if (intersects_array (c,
848 backtrackCount, backtrack,
849 lookup_context.funcs.intersects, lookup_context.intersects_data[0])
850 && intersects_array (c,
851 inputCount ? inputCount - 1 : 0, input,
852 lookup_context.funcs.intersects, lookup_context.intersects_data[1])
853 && intersects_array (c,
854 lookaheadCount, lookahead,
855 lookup_context.funcs.intersects, lookup_context.intersects_data[2]))
857 lookupCount, lookupRecord,
858 lookup_context.funcs.closure);
861 static inline bool chain_context_apply_lookup (hb_apply_context_t *c,
862 unsigned int backtrackCount,
863 const USHORT backtrack[],
864 unsigned int inputCount, /* Including the first glyph (not matched) */
865 const USHORT input[], /* Array of input values--start with second glyph */
866 unsigned int lookaheadCount,
867 const USHORT lookahead[],
868 unsigned int lookupCount,
869 const LookupRecord lookupRecord[],
870 ChainContextApplyLookupContext &lookup_context)
872 unsigned int lookahead_offset;
873 return match_backtrack (c,
874 backtrackCount, backtrack,
875 lookup_context.funcs.match, lookup_context.match_data[0])
878 lookup_context.funcs.match, lookup_context.match_data[1],
880 && match_lookahead (c,
881 lookaheadCount, lookahead,
882 lookup_context.funcs.match, lookup_context.match_data[2],
886 lookupCount, lookupRecord,
887 lookup_context.funcs.apply);
/* Body of a ChainContext rule (Formats 1/2): four variable-length arrays
 * laid out back to back (backtrack, input, lookahead, lookup records),
 * located at runtime with StructAfter.
 * NOTE(review): the struct header, braces and some lines were dropped by
 * extraction; surviving tokens kept verbatim. */
892 friend struct ChainRuleSet;
896 inline void closure (hb_closure_context_t *c, ChainContextClosureLookupContext &lookup_context) const
/* The four arrays are not at fixed offsets; chase them in order. */
899 const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
900 const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
901 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
902 chain_context_closure_lookup (c,
903 backtrack.len, backtrack.array,
904 input.len, input.array,
905 lookahead.len, lookahead.array,
906 lookup.len, lookup.array,
910 inline bool apply (hb_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
913 const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
914 const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
915 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
916 return TRACE_RETURN (chain_context_apply_lookup (c,
917 backtrack.len, backtrack.array,
918 input.len, input.array,
919 lookahead.len, lookahead.array, lookup.len,
920 lookup.array, lookup_context));
924 inline bool sanitize (hb_sanitize_context_t *c) {
/* Each array must sanitize before the next one's position is computed. */
926 if (!backtrack.sanitize (c)) return TRACE_RETURN (false);
927 HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
928 if (!input.sanitize (c)) return TRACE_RETURN (false);
929 ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
930 if (!lookahead.sanitize (c)) return TRACE_RETURN (false);
931 ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
932 return TRACE_RETURN (lookup.sanitize (c));
937 backtrack; /* Array of backtracking values
938 * (to be matched before the input
940 HeadlessArrayOf<USHORT>
941 inputX; /* Array of input values (start with
944 lookaheadX; /* Array of lookahead values's (to be
945 * matched after the input sequence) */
946 ArrayOf<LookupRecord>
947 lookupX; /* Array of LookupRecords--in
/* A set of ChainRules tried in order; apply() returns on first match.
 * NOTE(review): struct header/braces dropped by extraction; tokens kept
 * verbatim. */
955 inline void closure (hb_closure_context_t *c, ChainContextClosureLookupContext &lookup_context) const
958 unsigned int num_rules = rule.len;
959 for (unsigned int i = 0; i < num_rules; i++)
960 (this+rule[i]).closure (c, lookup_context);
963 inline bool apply (hb_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
966 unsigned int num_rules = rule.len;
/* First rule that applies wins. */
967 for (unsigned int i = 0; i < num_rules; i++)
968 if ((this+rule[i]).apply (c, lookup_context))
969 return TRACE_RETURN (true);
971 return TRACE_RETURN (false);
974 inline bool sanitize (hb_sanitize_context_t *c) {
976 return TRACE_RETURN (rule.sanitize (c, this));
980 OffsetArrayOf<ChainRule>
981 rule; /* Array of ChainRule tables
982 * ordered by preference */
984 DEFINE_SIZE_ARRAY (2, rule);
/* ChainContext Format 1: rule sets keyed by first-glyph coverage,
 * matched against literal glyph ids.
 * NOTE(review): braces and some lines dropped by extraction; tokens kept
 * verbatim. */
987 struct ChainContextFormat1
989 friend struct ChainContext;
993 inline void closure (hb_closure_context_t *c, closure_lookup_func_t closure_func) const
996 const Coverage &cov = (this+coverage);
998 struct ChainContextClosureLookupContext lookup_context = {
999 {intersects_glyph, closure_func},
1003 unsigned int count = ruleSet.len;
1004 for (unsigned int i = 0; i < count; i++)
1005 if (cov.intersects_coverage (c->glyphs, i)) {
1006 const ChainRuleSet &rule_set = this+ruleSet[i];
1007 rule_set.closure (c, lookup_context);
1011 inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
1014 unsigned int index = (this+coverage) (c->buffer->cur().codepoint);
1015 if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);
1017 const ChainRuleSet &rule_set = this+ruleSet[index];
1018 struct ChainContextApplyLookupContext lookup_context = {
1019 {match_glyph, apply_func},
1022 return TRACE_RETURN (rule_set.apply (c, lookup_context));
1025 inline bool sanitize (hb_sanitize_context_t *c) {
1027 return TRACE_RETURN (coverage.sanitize (c, this) && ruleSet.sanitize (c, this));
1031 USHORT format; /* Format identifier--format = 1 */
1033 coverage; /* Offset to Coverage table--from
1034 * beginning of table */
1035 OffsetArrayOf<ChainRuleSet>
1036 ruleSet; /* Array of ChainRuleSet tables
1037 * ordered by Coverage Index */
1039 DEFINE_SIZE_ARRAY (6, ruleSet);
/* ChainContext Format 2: separate ClassDefs for backtrack, input, and
 * lookahead sequences; rule sets keyed by the input class of the first
 * glyph.
 * NOTE(review): braces and some lines dropped by extraction; tokens kept
 * verbatim. */
1042 struct ChainContextFormat2
1044 friend struct ChainContext;
1048 inline void closure (hb_closure_context_t *c, closure_lookup_func_t closure_func) const
1051 if (!(this+coverage).intersects (c->glyphs))
1054 const ClassDef &backtrack_class_def = this+backtrackClassDef;
1055 const ClassDef &input_class_def = this+inputClassDef;
1056 const ClassDef &lookahead_class_def = this+lookaheadClassDef;
1058 struct ChainContextClosureLookupContext lookup_context = {
1059 {intersects_class, closure_func},
1060 {&backtrack_class_def,
1062 &lookahead_class_def}
1065 unsigned int count = ruleSet.len;
/* Rule set i applies to input class i. */
1066 for (unsigned int i = 0; i < count; i++)
1067 if (input_class_def.intersects_class (c->glyphs, i)) {
1068 const ChainRuleSet &rule_set = this+ruleSet[i];
1069 rule_set.closure (c, lookup_context);
1073 inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
1076 unsigned int index = (this+coverage) (c->buffer->cur().codepoint);
1077 if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);
1079 const ClassDef &backtrack_class_def = this+backtrackClassDef;
1080 const ClassDef &input_class_def = this+inputClassDef;
1081 const ClassDef &lookahead_class_def = this+lookaheadClassDef;
/* Re-key: rule set selected by the glyph's input class. */
1083 index = input_class_def (c->buffer->cur().codepoint);
1084 const ChainRuleSet &rule_set = this+ruleSet[index];
1085 struct ChainContextApplyLookupContext lookup_context = {
1086 {match_class, apply_func},
1087 {&backtrack_class_def,
1089 &lookahead_class_def}
1091 return TRACE_RETURN (rule_set.apply (c, lookup_context));
1094 inline bool sanitize (hb_sanitize_context_t *c) {
1096 return TRACE_RETURN (coverage.sanitize (c, this) && backtrackClassDef.sanitize (c, this) &&
1097 inputClassDef.sanitize (c, this) && lookaheadClassDef.sanitize (c, this) &&
1098 ruleSet.sanitize (c, this));
1102 USHORT format; /* Format identifier--format = 2 */
1104 coverage; /* Offset to Coverage table--from
1105 * beginning of table */
1107 backtrackClassDef; /* Offset to glyph ClassDef table
1108 * containing backtrack sequence
1109 * data--from beginning of table */
1111 inputClassDef; /* Offset to glyph ClassDef
1112 * table containing input sequence
1113 * data--from beginning of table */
1115 lookaheadClassDef; /* Offset to glyph ClassDef table
1116 * containing lookahead sequence
1117 * data--from beginning of table */
1118 OffsetArrayOf<ChainRuleSet>
1119 ruleSet; /* Array of ChainRuleSet tables
1120 * ordered by class */
1122 DEFINE_SIZE_ARRAY (12, ruleSet);
/* ChainContext Format 3: one Coverage table per position in each of the
 * backtrack/input/lookahead sequences; arrays chased with StructAfter.
 * NOTE(review): braces and some lines dropped by extraction; tokens kept
 * verbatim. */
1125 struct ChainContextFormat3
1127 friend struct ChainContext;
1131 inline void closure (hb_closure_context_t *c, closure_lookup_func_t closure_func) const
1134 const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
/* input[0] is the coverage of the first (keyed) glyph. */
1136 if (!(this+input[0]).intersects (c->glyphs))
1139 const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
1140 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
1141 struct ChainContextClosureLookupContext lookup_context = {
1142 {intersects_coverage, closure_func},
1145 chain_context_closure_lookup (c,
1146 backtrack.len, (const USHORT *) backtrack.array,
1147 input.len, (const USHORT *) input.array + 1,
1148 lookahead.len, (const USHORT *) lookahead.array,
1149 lookup.len, lookup.array,
1153 inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
1156 const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
1158 unsigned int index = (this+input[0]) (c->buffer->cur().codepoint);
1159 if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);
1161 const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
1162 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
1163 struct ChainContextApplyLookupContext lookup_context = {
1164 {match_coverage, apply_func},
1167 return TRACE_RETURN (chain_context_apply_lookup (c,
1168 backtrack.len, (const USHORT *) backtrack.array,
1169 input.len, (const USHORT *) input.array + 1,
1170 lookahead.len, (const USHORT *) lookahead.array,
1171 lookup.len, lookup.array, lookup_context));
1174 inline bool sanitize (hb_sanitize_context_t *c) {
/* Each array must sanitize before the next one's position is computed. */
1176 if (!backtrack.sanitize (c, this)) return TRACE_RETURN (false);
1177 OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
1178 if (!input.sanitize (c, this)) return TRACE_RETURN (false);
1179 OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
1180 if (!lookahead.sanitize (c, this)) return TRACE_RETURN (false);
1181 ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
1182 return TRACE_RETURN (lookup.sanitize (c));
1186 USHORT format; /* Format identifier--format = 3 */
1187 OffsetArrayOf<Coverage>
1188 backtrack; /* Array of coverage tables
1189 * in backtracking sequence, in glyph
1191 OffsetArrayOf<Coverage>
1192 inputX ; /* Array of coverage
1193 * tables in input sequence, in glyph
1195 OffsetArrayOf<Coverage>
1196 lookaheadX; /* Array of coverage tables
1197 * in lookahead sequence, in glyph
1199 ArrayOf<LookupRecord>
1200 lookupX; /* Array of LookupRecords--in
1203 DEFINE_SIZE_MIN (10);
/* Glyph-closure computation: dispatch on the subtable format tag to the
 * matching variant.  NOTE(review): the switch header and TRACE lines are
 * not visible in this excerpt. */
1210 inline void closure (hb_closure_context_t *c, closure_lookup_func_t closure_func) const
1214 case 1: u.format1.closure (c, closure_func); break;
1215 case 2: u.format2.closure (c, closure_func); break;
1216 case 3: u.format3.closure (c, closure_func); break;
/* Apply: dispatch on the subtable format tag; unrecognized formats
 * report no match (false) rather than erroring. */
1221 inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
1225 case 1: return TRACE_RETURN (u.format1.apply (c, apply_func));
1226 case 2: return TRACE_RETURN (u.format2.apply (c, apply_func));
1227 case 3: return TRACE_RETURN (u.format3.apply (c, apply_func));
1228 default:return TRACE_RETURN (false);
/* Sanitize the format tag first (it selects the union member), then the
 * chosen variant.  Unknown formats are accepted (true) so fonts using
 * newer formats don't fail sanitization wholesale; apply() ignores them. */
1232 inline bool sanitize (hb_sanitize_context_t *c) {
1234 if (!u.format.sanitize (c)) return TRACE_RETURN (false);
1236 case 1: return TRACE_RETURN (u.format1.sanitize (c));
1237 case 2: return TRACE_RETURN (u.format2.sanitize (c));
1238 case 3: return TRACE_RETURN (u.format3.sanitize (c));
1239 default:return TRACE_RETURN (true);
/* Tagged union of the three chain-context subtable formats; `format`
 * selects which member is live. */
1245 USHORT format; /* Format identifier */
1246 ChainContextFormat1 format1;
1247 ChainContextFormat2 format2;
1248 ChainContextFormat3 format3;
/* Extension lookup subtable, format 1: an indirection record carrying the
 * wrapped lookup's type plus a 32-bit (ULONG) offset to its actual
 * subtable — used where 16-bit offsets cannot reach. */
1253 struct ExtensionFormat1
1255 friend struct Extension;
/* Lookup type of the subtable this extension wraps. */
1258 inline unsigned int get_type (void) const { return extensionLookupType; }
/* Raw extensionOffset value; the base it is relative to is resolved by
 * the caller — presumably this struct's start, per OpenType — verify. */
1259 inline unsigned int get_offset (void) const { return extensionOffset; }
/* Fixed-size record: a plain struct-bounds check is sufficient. */
1261 inline bool sanitize (hb_sanitize_context_t *c) {
1263 return TRACE_RETURN (c->check_struct (this));
1267 USHORT format; /* Format identifier. Set to 1. */
1268 USHORT extensionLookupType; /* Lookup type of subtable referenced
1269 * by ExtensionOffset (i.e. the
1270 * extension subtable). */
1271 ULONG extensionOffset; /* Offset to the extension subtable,
1272 * of lookup type subtable. */
1274 DEFINE_SIZE_STATIC (8);
/* Wrapped lookup type, dispatched on `format`.  NOTE(review): switch
 * header and default branch fall outside this excerpt. */
1279 inline unsigned int get_type (void) const
1282 case 1: return u.format1.get_type ();
/* Offset to the wrapped subtable, dispatched on `format`.  NOTE(review):
 * switch header and default branch fall outside this excerpt. */
1286 inline unsigned int get_offset (void) const
1289 case 1: return u.format1.get_offset ();
/* Sanitize the format tag, then the selected variant; unknown formats
 * pass (true) for forward compatibility with newer tables. */
1294 inline bool sanitize (hb_sanitize_context_t *c) {
1296 if (!u.format.sanitize (c)) return TRACE_RETURN (false);
1298 case 1: return TRACE_RETURN (u.format1.sanitize (c));
1299 default:return TRACE_RETURN (true);
/* Tagged union; only format 1 is defined for Extension subtables. */
1305 USHORT format; /* Format identifier */
1306 ExtensionFormat1 format1;
/* Table tags of the two OpenType layout tables sharing this header. */
1317 static const hb_tag_t GSUBTag = HB_OT_TAG_GSUB;
1318 static const hb_tag_t GPOSTag = HB_OT_TAG_GPOS;
/* Number of script records in the ScriptList. */
1320 inline unsigned int get_script_count (void) const
1321 { return (this+scriptList).len; }
/* Tag of the i'th script record; delegates to ScriptList::get_tag.
 * NOTE(review): out-of-range behavior is defined there — confirm. */
1322 inline const Tag& get_script_tag (unsigned int i) const
1323 { return (this+scriptList).get_tag (i); }
/* Batch tag retrieval starting at start_offset; *script_count is the
 * caller's buffer capacity on input, tags written on output. */
1324 inline unsigned int get_script_tags (unsigned int start_offset,
1325 unsigned int *script_count /* IN/OUT */,
1326 hb_tag_t *script_tags /* OUT */) const
1327 { return (this+scriptList).get_tags (start_offset, script_count, script_tags); }
/* The i'th Script table. */
1328 inline const Script& get_script (unsigned int i) const
1329 { return (this+scriptList)[i]; }
/* Find a script by tag; on success stores its index and returns true. */
1330 inline bool find_script_index (hb_tag_t tag, unsigned int *index) const
1331 { return (this+scriptList).find_index (tag, index); }
/* Number of feature records in the FeatureList. */
1333 inline unsigned int get_feature_count (void) const
1334 { return (this+featureList).len; }
/* Tag of the i'th feature record; delegates to FeatureList::get_tag. */
1335 inline const Tag& get_feature_tag (unsigned int i) const
1336 { return (this+featureList).get_tag (i); }
/* Batch tag retrieval starting at start_offset; *feature_count is the
 * caller's buffer capacity on input, tags written on output. */
1337 inline unsigned int get_feature_tags (unsigned int start_offset,
1338 unsigned int *feature_count /* IN/OUT */,
1339 hb_tag_t *feature_tags /* OUT */) const
1340 { return (this+featureList).get_tags (start_offset, feature_count, feature_tags); }
/* The i'th Feature table. */
1341 inline const Feature& get_feature (unsigned int i) const
1342 { return (this+featureList)[i]; }
/* Find a feature by tag; on success stores its index and returns true. */
1343 inline bool find_feature_index (hb_tag_t tag, unsigned int *index) const
1344 { return (this+featureList).find_index (tag, index); }
/* Number of lookups in the LookupList. */
1346 inline unsigned int get_lookup_count (void) const
1347 { return (this+lookupList).len; }
/* The i'th Lookup table. */
1348 inline const Lookup& get_lookup (unsigned int i) const
1349 { return (this+lookupList)[i]; }
/* Validates the table header: version field must be readable and have
 * major version 1, then each of the three offset-linked lists is
 * sanitized relative to this table's start. */
1351 inline bool sanitize (hb_sanitize_context_t *c) {
1353 return TRACE_RETURN (version.sanitize (c) && likely (version.major == 1) &&
1354 scriptList.sanitize (c, this) &&
1355 featureList.sanitize (c, this) &&
1356 lookupList.sanitize (c, this));
/* Common GSUB/GPOS header: a version field followed by three offsets to
 * the script, feature and lookup lists — consistent with the static
 * size of 10 bytes declared below. */
1360 FixedVersion version; /* Version of the GSUB/GPOS table--initially set
1362 OffsetTo<ScriptList>
1363 scriptList; /* ScriptList table */
1364 OffsetTo<FeatureList>
1365 featureList; /* FeatureList table */
1366 OffsetTo<LookupList>
1367 lookupList; /* LookupList table */
1369 DEFINE_SIZE_STATIC (10);
1374 #endif /* HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH */