/*
 * Copyright © 2007,2008,2009,2010  Red Hat, Inc.
 * Copyright © 2010,2012  Google, Inc.
 *
 *  This is part of HarfBuzz, a text shaping library.
 *
 * Permission is hereby granted, without written agreement and without
 * license or royalty fees, to use, copy, modify, and distribute this
 * software and its documentation for any purpose, provided that the
 * above copyright notice and the following two paragraphs appear in
 * all copies of this software.
 *
 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
 * DAMAGE.
 *
 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
 * FITNESS FOR A PARTICULAR PURPOSE.  THE SOFTWARE PROVIDED HEREUNDER IS
 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
 *
 * Red Hat Author(s): Behdad Esfahbod
 * Google Author(s): Behdad Esfahbod
 */
29 #ifndef HB_OT_LAYOUT_GPOS_TABLE_HH
30 #define HB_OT_LAYOUT_GPOS_TABLE_HH
32 #include "hb-ot-layout-gsubgpos-private.hh"
/* buffer **position** var allocations.
 * These macros alias slots of the per-glyph `var` union in
 * hb_glyph_position_t; GPOS owns them during positioning. */
#define attach_lookback() var.u16[0] /* number of glyphs to go back to attach this glyph to its base */
#define cursive_chain() var.i16[1] /* character to which this connects, may be positive or negative */
43 /* Shared Tables: ValueRecord, Anchor Table, and MarkArray */
47 typedef Value ValueRecord[VAR];
49 struct ValueFormat : USHORT
52 xPlacement = 0x0001, /* Includes horizontal adjustment for placement */
53 yPlacement = 0x0002, /* Includes vertical adjustment for placement */
54 xAdvance = 0x0004, /* Includes horizontal adjustment for advance */
55 yAdvance = 0x0008, /* Includes vertical adjustment for advance */
56 xPlaDevice = 0x0010, /* Includes horizontal Device table for placement */
57 yPlaDevice = 0x0020, /* Includes vertical Device table for placement */
58 xAdvDevice = 0x0040, /* Includes horizontal Device table for advance */
59 yAdvDevice = 0x0080, /* Includes vertical Device table for advance */
60 ignored = 0x0F00, /* Was used in TrueType Open for MM fonts */
61 reserved = 0xF000, /* For future use */
63 devices = 0x00F0 /* Mask for having any Device table */
66 /* All fields are options. Only those available advance the value pointer. */
68 SHORT xPlacement; /* Horizontal adjustment for
69 * placement--in design units */
70 SHORT yPlacement; /* Vertical adjustment for
71 * placement--in design units */
72 SHORT xAdvance; /* Horizontal adjustment for
73 * advance--in design units (only used
74 * for horizontal writing) */
75 SHORT yAdvance; /* Vertical adjustment for advance--in
76 * design units (only used for vertical
78 Offset xPlaDevice; /* Offset to Device table for
79 * horizontal placement--measured from
80 * beginning of PosTable (may be NULL) */
81 Offset yPlaDevice; /* Offset to Device table for vertical
82 * placement--measured from beginning
83 * of PosTable (may be NULL) */
84 Offset xAdvDevice; /* Offset to Device table for
85 * horizontal advance--measured from
86 * beginning of PosTable (may be NULL) */
87 Offset yAdvDevice; /* Offset to Device table for vertical
88 * advance--measured from beginning of
89 * PosTable (may be NULL) */
92 inline unsigned int get_len (void) const
93 { return _hb_popcount32 ((unsigned int) *this); }
94 inline unsigned int get_size (void) const
95 { return get_len () * Value::static_size; }
97 void apply_value (hb_font_t *font,
98 hb_direction_t direction,
101 hb_glyph_position_t &glyph_pos) const
103 unsigned int x_ppem, y_ppem;
104 unsigned int format = *this;
105 hb_bool_t horizontal = HB_DIRECTION_IS_HORIZONTAL (direction);
109 if (format & xPlacement) glyph_pos.x_offset += font->em_scale_x (get_short (values++));
110 if (format & yPlacement) glyph_pos.y_offset += font->em_scale_y (get_short (values++));
111 if (format & xAdvance) {
112 if (likely (horizontal)) glyph_pos.x_advance += font->em_scale_x (get_short (values++)); else values++;
114 /* y_advance values grow downward but font-space grows upward, hence negation */
115 if (format & yAdvance) {
116 if (unlikely (!horizontal)) glyph_pos.y_advance -= font->em_scale_y (get_short (values++)); else values++;
119 if (!has_device ()) return;
121 x_ppem = font->x_ppem;
122 y_ppem = font->y_ppem;
124 if (!x_ppem && !y_ppem) return;
126 /* pixel -> fractional pixel */
127 if (format & xPlaDevice) {
128 if (x_ppem) glyph_pos.x_offset += (base + get_device (values++)).get_x_delta (font); else values++;
130 if (format & yPlaDevice) {
131 if (y_ppem) glyph_pos.y_offset += (base + get_device (values++)).get_y_delta (font); else values++;
133 if (format & xAdvDevice) {
134 if (horizontal && x_ppem) glyph_pos.x_advance += (base + get_device (values++)).get_x_delta (font); else values++;
136 if (format & yAdvDevice) {
137 /* y_advance values grow downward but font-space grows upward, hence negation */
138 if (!horizontal && y_ppem) glyph_pos.y_advance -= (base + get_device (values++)).get_y_delta (font); else values++;
143 inline bool sanitize_value_devices (hb_sanitize_context_t *c, void *base, Value *values) {
144 unsigned int format = *this;
146 if (format & xPlacement) values++;
147 if (format & yPlacement) values++;
148 if (format & xAdvance) values++;
149 if (format & yAdvance) values++;
151 if ((format & xPlaDevice) && !get_device (values++).sanitize (c, base)) return false;
152 if ((format & yPlaDevice) && !get_device (values++).sanitize (c, base)) return false;
153 if ((format & xAdvDevice) && !get_device (values++).sanitize (c, base)) return false;
154 if ((format & yAdvDevice) && !get_device (values++).sanitize (c, base)) return false;
159 static inline OffsetTo<Device>& get_device (Value* value)
160 { return *CastP<OffsetTo<Device> > (value); }
161 static inline const OffsetTo<Device>& get_device (const Value* value)
162 { return *CastP<OffsetTo<Device> > (value); }
164 static inline const SHORT& get_short (const Value* value)
165 { return *CastP<SHORT> (value); }
169 inline bool has_device (void) const {
170 unsigned int format = *this;
171 return (format & devices) != 0;
174 inline bool sanitize_value (hb_sanitize_context_t *c, void *base, Value *values) {
175 TRACE_SANITIZE (this);
176 return TRACE_RETURN (c->check_range (values, get_size ()) && (!has_device () || sanitize_value_devices (c, base, values)));
179 inline bool sanitize_values (hb_sanitize_context_t *c, void *base, Value *values, unsigned int count) {
180 TRACE_SANITIZE (this);
181 unsigned int len = get_len ();
183 if (!c->check_array (values, get_size (), count)) return TRACE_RETURN (false);
185 if (!has_device ()) return TRACE_RETURN (true);
187 for (unsigned int i = 0; i < count; i++) {
188 if (!sanitize_value_devices (c, base, values))
189 return TRACE_RETURN (false);
193 return TRACE_RETURN (true);
196 /* Just sanitize referenced Device tables. Doesn't check the values themselves. */
197 inline bool sanitize_values_stride_unsafe (hb_sanitize_context_t *c, void *base, Value *values, unsigned int count, unsigned int stride) {
198 TRACE_SANITIZE (this);
200 if (!has_device ()) return TRACE_RETURN (true);
202 for (unsigned int i = 0; i < count; i++) {
203 if (!sanitize_value_devices (c, base, values))
204 return TRACE_RETURN (false);
208 return TRACE_RETURN (true);
215 inline void get_anchor (hb_font_t *font, hb_codepoint_t glyph_id HB_UNUSED,
216 hb_position_t *x, hb_position_t *y) const
218 *x = font->em_scale_x (xCoordinate);
219 *y = font->em_scale_y (yCoordinate);
222 inline bool sanitize (hb_sanitize_context_t *c) {
223 TRACE_SANITIZE (this);
224 return TRACE_RETURN (c->check_struct (this));
228 USHORT format; /* Format identifier--format = 1 */
229 SHORT xCoordinate; /* Horizontal value--in design units */
230 SHORT yCoordinate; /* Vertical value--in design units */
232 DEFINE_SIZE_STATIC (6);
237 inline void get_anchor (hb_font_t *font, hb_codepoint_t glyph_id,
238 hb_position_t *x, hb_position_t *y) const
240 unsigned int x_ppem = font->x_ppem;
241 unsigned int y_ppem = font->y_ppem;
242 hb_position_t cx, cy;
243 hb_bool_t ret = false;
245 if (x_ppem || y_ppem)
246 ret = font->get_glyph_contour_point_for_origin (glyph_id, anchorPoint, HB_DIRECTION_LTR, &cx, &cy);
247 *x = x_ppem && ret ? cx : font->em_scale_x (xCoordinate);
248 *y = y_ppem && ret ? cy : font->em_scale_y (yCoordinate);
251 inline bool sanitize (hb_sanitize_context_t *c) {
252 TRACE_SANITIZE (this);
253 return TRACE_RETURN (c->check_struct (this));
257 USHORT format; /* Format identifier--format = 2 */
258 SHORT xCoordinate; /* Horizontal value--in design units */
259 SHORT yCoordinate; /* Vertical value--in design units */
260 USHORT anchorPoint; /* Index to glyph contour point */
262 DEFINE_SIZE_STATIC (8);
267 inline void get_anchor (hb_font_t *font, hb_codepoint_t glyph_id HB_UNUSED,
268 hb_position_t *x, hb_position_t *y) const
270 *x = font->em_scale_x (xCoordinate);
271 *y = font->em_scale_y (yCoordinate);
274 *x += (this+xDeviceTable).get_x_delta (font);
276 *y += (this+yDeviceTable).get_x_delta (font);
279 inline bool sanitize (hb_sanitize_context_t *c) {
280 TRACE_SANITIZE (this);
281 return TRACE_RETURN (c->check_struct (this) && xDeviceTable.sanitize (c, this) && yDeviceTable.sanitize (c, this));
285 USHORT format; /* Format identifier--format = 3 */
286 SHORT xCoordinate; /* Horizontal value--in design units */
287 SHORT yCoordinate; /* Vertical value--in design units */
289 xDeviceTable; /* Offset to Device table for X
290 * coordinate-- from beginning of
291 * Anchor table (may be NULL) */
293 yDeviceTable; /* Offset to Device table for Y
294 * coordinate-- from beginning of
295 * Anchor table (may be NULL) */
297 DEFINE_SIZE_STATIC (10);
302 inline void get_anchor (hb_font_t *font, hb_codepoint_t glyph_id,
303 hb_position_t *x, hb_position_t *y) const
307 case 1: u.format1.get_anchor (font, glyph_id, x, y); return;
308 case 2: u.format2.get_anchor (font, glyph_id, x, y); return;
309 case 3: u.format3.get_anchor (font, glyph_id, x, y); return;
314 inline bool sanitize (hb_sanitize_context_t *c) {
315 TRACE_SANITIZE (this);
316 if (!u.format.sanitize (c)) return TRACE_RETURN (false);
318 case 1: return TRACE_RETURN (u.format1.sanitize (c));
319 case 2: return TRACE_RETURN (u.format2.sanitize (c));
320 case 3: return TRACE_RETURN (u.format3.sanitize (c));
321 default:return TRACE_RETURN (true);
327 USHORT format; /* Format identifier */
328 AnchorFormat1 format1;
329 AnchorFormat2 format2;
330 AnchorFormat3 format3;
333 DEFINE_SIZE_UNION (2, format);
339 inline const Anchor& get_anchor (unsigned int row, unsigned int col, unsigned int cols, bool *found) const {
341 if (unlikely (row >= rows || col >= cols)) return Null(Anchor);
342 *found = !matrix[row * cols + col].is_null ();
343 return this+matrix[row * cols + col];
346 inline bool sanitize (hb_sanitize_context_t *c, unsigned int cols) {
347 TRACE_SANITIZE (this);
348 if (!c->check_struct (this)) return TRACE_RETURN (false);
349 if (unlikely (rows > 0 && cols >= ((unsigned int) -1) / rows)) return TRACE_RETURN (false);
350 unsigned int count = rows * cols;
351 if (!c->check_array (matrix, matrix[0].static_size, count)) return TRACE_RETURN (false);
352 for (unsigned int i = 0; i < count; i++)
353 if (!matrix[i].sanitize (c, this)) return TRACE_RETURN (false);
354 return TRACE_RETURN (true);
357 USHORT rows; /* Number of rows */
360 matrix[VAR]; /* Matrix of offsets to Anchor tables--
361 * from beginning of AnchorMatrix table */
363 DEFINE_SIZE_ARRAY (2, matrix);
369 friend struct MarkArray;
371 inline bool sanitize (hb_sanitize_context_t *c, void *base) {
372 TRACE_SANITIZE (this);
373 return TRACE_RETURN (c->check_struct (this) && markAnchor.sanitize (c, base));
377 USHORT klass; /* Class defined for this mark */
379 markAnchor; /* Offset to Anchor table--from
380 * beginning of MarkArray table */
382 DEFINE_SIZE_STATIC (4);
385 struct MarkArray : ArrayOf<MarkRecord> /* Array of MarkRecords--in Coverage order */
387 inline bool apply (hb_apply_context_t *c,
388 unsigned int mark_index, unsigned int glyph_index,
389 const AnchorMatrix &anchors, unsigned int class_count,
390 unsigned int glyph_pos) const
393 const MarkRecord &record = ArrayOf<MarkRecord>::operator[](mark_index);
394 unsigned int mark_class = record.klass;
396 const Anchor& mark_anchor = this + record.markAnchor;
398 const Anchor& glyph_anchor = anchors.get_anchor (glyph_index, mark_class, class_count, &found);
399 /* If this subtable doesn't have an anchor for this base and this class,
400 * return false such that the subsequent subtables have a chance at it. */
401 if (unlikely (!found)) return TRACE_RETURN (false);
403 hb_position_t mark_x, mark_y, base_x, base_y;
405 mark_anchor.get_anchor (c->font, c->buffer->cur().codepoint, &mark_x, &mark_y);
406 glyph_anchor.get_anchor (c->font, c->buffer->info[glyph_pos].codepoint, &base_x, &base_y);
408 hb_glyph_position_t &o = c->buffer->cur_pos();
409 o.x_offset = base_x - mark_x;
410 o.y_offset = base_y - mark_y;
411 o.attach_lookback() = c->buffer->idx - glyph_pos;
414 return TRACE_RETURN (true);
417 inline bool sanitize (hb_sanitize_context_t *c) {
418 TRACE_SANITIZE (this);
419 return TRACE_RETURN (ArrayOf<MarkRecord>::sanitize (c, this));
426 struct SinglePosFormat1
428 inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
430 TRACE_COLLECT_GLYPHS (this);
431 (this+coverage).add_coverage (c->input);
434 inline const Coverage &get_coverage (void) const
436 return this+coverage;
439 inline bool apply (hb_apply_context_t *c) const
442 unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
443 if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);
445 valueFormat.apply_value (c->font, c->direction, this,
446 values, c->buffer->cur_pos());
449 return TRACE_RETURN (true);
452 inline bool sanitize (hb_sanitize_context_t *c) {
453 TRACE_SANITIZE (this);
454 return TRACE_RETURN (c->check_struct (this) && coverage.sanitize (c, this) && valueFormat.sanitize_value (c, this, values));
458 USHORT format; /* Format identifier--format = 1 */
460 coverage; /* Offset to Coverage table--from
461 * beginning of subtable */
462 ValueFormat valueFormat; /* Defines the types of data in the
464 ValueRecord values; /* Defines positioning
465 * value(s)--applied to all glyphs in
466 * the Coverage table */
468 DEFINE_SIZE_ARRAY (6, values);
471 struct SinglePosFormat2
473 inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
475 TRACE_COLLECT_GLYPHS (this);
476 (this+coverage).add_coverage (c->input);
479 inline const Coverage &get_coverage (void) const
481 return this+coverage;
484 inline bool apply (hb_apply_context_t *c) const
487 unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
488 if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);
490 if (likely (index >= valueCount)) return TRACE_RETURN (false);
492 valueFormat.apply_value (c->font, c->direction, this,
493 &values[index * valueFormat.get_len ()],
494 c->buffer->cur_pos());
497 return TRACE_RETURN (true);
500 inline bool sanitize (hb_sanitize_context_t *c) {
501 TRACE_SANITIZE (this);
502 return TRACE_RETURN (c->check_struct (this) && coverage.sanitize (c, this) && valueFormat.sanitize_values (c, this, values, valueCount));
506 USHORT format; /* Format identifier--format = 2 */
508 coverage; /* Offset to Coverage table--from
509 * beginning of subtable */
510 ValueFormat valueFormat; /* Defines the types of data in the
512 USHORT valueCount; /* Number of ValueRecords */
513 ValueRecord values; /* Array of ValueRecords--positioning
514 * values applied to glyphs */
516 DEFINE_SIZE_ARRAY (8, values);
521 template <typename context_t>
522 inline typename context_t::return_t process (context_t *c) const
524 TRACE_PROCESS (this);
526 case 1: return TRACE_RETURN (c->process (u.format1));
527 case 2: return TRACE_RETURN (c->process (u.format2));
528 default:return TRACE_RETURN (c->default_return_value ());
532 inline bool sanitize (hb_sanitize_context_t *c) {
533 TRACE_SANITIZE (this);
534 if (!u.format.sanitize (c)) return TRACE_RETURN (false);
536 case 1: return TRACE_RETURN (u.format1.sanitize (c));
537 case 2: return TRACE_RETURN (u.format2.sanitize (c));
538 default:return TRACE_RETURN (true);
544 USHORT format; /* Format identifier */
545 SinglePosFormat1 format1;
546 SinglePosFormat2 format2;
551 struct PairValueRecord
553 friend struct PairSet;
556 GlyphID secondGlyph; /* GlyphID of second glyph in the
557 * pair--first glyph is listed in the
559 ValueRecord values; /* Positioning data for the first glyph
560 * followed by for second glyph */
562 DEFINE_SIZE_ARRAY (2, values);
567 friend struct PairPosFormat1;
569 inline void collect_glyphs (hb_collect_glyphs_context_t *c,
570 const ValueFormat *valueFormats) const
572 TRACE_COLLECT_GLYPHS (this);
573 unsigned int len1 = valueFormats[0].get_len ();
574 unsigned int len2 = valueFormats[1].get_len ();
575 unsigned int record_size = USHORT::static_size * (1 + len1 + len2);
577 const PairValueRecord *record = CastP<PairValueRecord> (array);
578 unsigned int count = len;
579 for (unsigned int i = 0; i < count; i++)
581 c->input->add (record->secondGlyph);
582 record = &StructAtOffset<PairValueRecord> (record, record_size);
586 inline bool apply (hb_apply_context_t *c,
587 const ValueFormat *valueFormats,
588 unsigned int pos) const
591 unsigned int len1 = valueFormats[0].get_len ();
592 unsigned int len2 = valueFormats[1].get_len ();
593 unsigned int record_size = USHORT::static_size * (1 + len1 + len2);
595 const PairValueRecord *record = CastP<PairValueRecord> (array);
596 unsigned int count = len;
597 for (unsigned int i = 0; i < count; i++)
599 if (c->buffer->info[pos].codepoint == record->secondGlyph)
601 valueFormats[0].apply_value (c->font, c->direction, this,
602 &record->values[0], c->buffer->cur_pos());
603 valueFormats[1].apply_value (c->font, c->direction, this,
604 &record->values[len1], c->buffer->pos[pos]);
607 c->buffer->idx = pos;
608 return TRACE_RETURN (true);
610 record = &StructAtOffset<PairValueRecord> (record, record_size);
613 return TRACE_RETURN (false);
616 struct sanitize_closure_t {
618 ValueFormat *valueFormats;
619 unsigned int len1; /* valueFormats[0].get_len() */
620 unsigned int stride; /* 1 + len1 + len2 */
623 inline bool sanitize (hb_sanitize_context_t *c, const sanitize_closure_t *closure) {
624 TRACE_SANITIZE (this);
625 if (!(c->check_struct (this)
626 && c->check_array (array, USHORT::static_size * closure->stride, len))) return TRACE_RETURN (false);
628 unsigned int count = len;
629 PairValueRecord *record = CastP<PairValueRecord> (array);
630 return TRACE_RETURN (closure->valueFormats[0].sanitize_values_stride_unsafe (c, closure->base, &record->values[0], count, closure->stride)
631 && closure->valueFormats[1].sanitize_values_stride_unsafe (c, closure->base, &record->values[closure->len1], count, closure->stride));
635 USHORT len; /* Number of PairValueRecords */
636 USHORT array[VAR]; /* Array of PairValueRecords--ordered
637 * by GlyphID of the second glyph */
639 DEFINE_SIZE_ARRAY (2, array);
642 struct PairPosFormat1
644 inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
646 TRACE_COLLECT_GLYPHS (this);
647 (this+coverage).add_coverage (c->input);
648 unsigned int count = pairSet.len;
649 for (unsigned int i = 0; i < count; i++)
650 (this+pairSet[i]).collect_glyphs (c, &valueFormat1);
653 inline const Coverage &get_coverage (void) const
655 return this+coverage;
658 inline bool apply (hb_apply_context_t *c) const
661 hb_apply_context_t::mark_skipping_forward_iterator_t skippy_iter (c, c->buffer->idx, 1);
662 if (skippy_iter.has_no_chance ()) return TRACE_RETURN (false);
664 unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
665 if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);
667 if (!skippy_iter.next ()) return TRACE_RETURN (false);
669 return TRACE_RETURN ((this+pairSet[index]).apply (c, &valueFormat1, skippy_iter.idx));
672 inline bool sanitize (hb_sanitize_context_t *c) {
673 TRACE_SANITIZE (this);
675 unsigned int len1 = valueFormat1.get_len ();
676 unsigned int len2 = valueFormat2.get_len ();
677 PairSet::sanitize_closure_t closure = {
684 return TRACE_RETURN (c->check_struct (this) && coverage.sanitize (c, this) && pairSet.sanitize (c, this, &closure));
688 USHORT format; /* Format identifier--format = 1 */
690 coverage; /* Offset to Coverage table--from
691 * beginning of subtable */
692 ValueFormat valueFormat1; /* Defines the types of data in
693 * ValueRecord1--for the first glyph
694 * in the pair--may be zero (0) */
695 ValueFormat valueFormat2; /* Defines the types of data in
696 * ValueRecord2--for the second glyph
697 * in the pair--may be zero (0) */
698 OffsetArrayOf<PairSet>
699 pairSet; /* Array of PairSet tables
700 * ordered by Coverage Index */
702 DEFINE_SIZE_ARRAY (10, pairSet);
705 struct PairPosFormat2
707 inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
709 TRACE_COLLECT_GLYPHS (this);
710 /* (this+coverage).add_coverage (c->input); // Don't need this. */
712 /* TODO only add values for pairs that have nonzero adjustments. */
714 unsigned int count1 = class1Count;
715 const ClassDef &klass1 = this+classDef1;
716 for (unsigned int i = 0; i < count1; i++)
717 klass1.add_class (c->input, i);
719 unsigned int count2 = class2Count;
720 const ClassDef &klass2 = this+classDef2;
721 for (unsigned int i = 0; i < count2; i++)
722 klass2.add_class (c->input, i);
725 inline const Coverage &get_coverage (void) const
727 return this+coverage;
730 inline bool apply (hb_apply_context_t *c) const
733 hb_apply_context_t::mark_skipping_forward_iterator_t skippy_iter (c, c->buffer->idx, 1);
734 if (skippy_iter.has_no_chance ()) return TRACE_RETURN (false);
736 unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
737 if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);
739 if (!skippy_iter.next ()) return TRACE_RETURN (false);
741 unsigned int len1 = valueFormat1.get_len ();
742 unsigned int len2 = valueFormat2.get_len ();
743 unsigned int record_len = len1 + len2;
745 unsigned int klass1 = (this+classDef1).get_class (c->buffer->cur().codepoint);
746 unsigned int klass2 = (this+classDef2).get_class (c->buffer->info[skippy_iter.idx].codepoint);
747 if (unlikely (klass1 >= class1Count || klass2 >= class2Count)) return TRACE_RETURN (false);
749 const Value *v = &values[record_len * (klass1 * class2Count + klass2)];
750 valueFormat1.apply_value (c->font, c->direction, this,
751 v, c->buffer->cur_pos());
752 valueFormat2.apply_value (c->font, c->direction, this,
753 v + len1, c->buffer->pos[skippy_iter.idx]);
755 c->buffer->idx = skippy_iter.idx;
759 return TRACE_RETURN (true);
762 inline bool sanitize (hb_sanitize_context_t *c) {
763 TRACE_SANITIZE (this);
764 if (!(c->check_struct (this)
765 && coverage.sanitize (c, this)
766 && classDef1.sanitize (c, this)
767 && classDef2.sanitize (c, this))) return TRACE_RETURN (false);
769 unsigned int len1 = valueFormat1.get_len ();
770 unsigned int len2 = valueFormat2.get_len ();
771 unsigned int stride = len1 + len2;
772 unsigned int record_size = valueFormat1.get_size () + valueFormat2.get_size ();
773 unsigned int count = (unsigned int) class1Count * (unsigned int) class2Count;
774 return TRACE_RETURN (c->check_array (values, record_size, count) &&
775 valueFormat1.sanitize_values_stride_unsafe (c, this, &values[0], count, stride) &&
776 valueFormat2.sanitize_values_stride_unsafe (c, this, &values[len1], count, stride));
780 USHORT format; /* Format identifier--format = 2 */
782 coverage; /* Offset to Coverage table--from
783 * beginning of subtable */
784 ValueFormat valueFormat1; /* ValueRecord definition--for the
785 * first glyph of the pair--may be zero
787 ValueFormat valueFormat2; /* ValueRecord definition--for the
788 * second glyph of the pair--may be
791 classDef1; /* Offset to ClassDef table--from
792 * beginning of PairPos subtable--for
793 * the first glyph of the pair */
795 classDef2; /* Offset to ClassDef table--from
796 * beginning of PairPos subtable--for
797 * the second glyph of the pair */
798 USHORT class1Count; /* Number of classes in ClassDef1
799 * table--includes Class0 */
800 USHORT class2Count; /* Number of classes in ClassDef2
801 * table--includes Class0 */
802 ValueRecord values; /* Matrix of value pairs:
803 * class1-major, class2-minor,
804 * Each entry has value1 and value2 */
806 DEFINE_SIZE_ARRAY (16, values);
811 template <typename context_t>
812 inline typename context_t::return_t process (context_t *c) const
814 TRACE_PROCESS (this);
816 case 1: return TRACE_RETURN (c->process (u.format1));
817 case 2: return TRACE_RETURN (c->process (u.format2));
818 default:return TRACE_RETURN (c->default_return_value ());
822 inline bool sanitize (hb_sanitize_context_t *c) {
823 TRACE_SANITIZE (this);
824 if (!u.format.sanitize (c)) return TRACE_RETURN (false);
826 case 1: return TRACE_RETURN (u.format1.sanitize (c));
827 case 2: return TRACE_RETURN (u.format2.sanitize (c));
828 default:return TRACE_RETURN (true);
834 USHORT format; /* Format identifier */
835 PairPosFormat1 format1;
836 PairPosFormat2 format2;
841 struct EntryExitRecord
843 friend struct CursivePosFormat1;
845 inline bool sanitize (hb_sanitize_context_t *c, void *base) {
846 TRACE_SANITIZE (this);
847 return TRACE_RETURN (entryAnchor.sanitize (c, base) && exitAnchor.sanitize (c, base));
852 entryAnchor; /* Offset to EntryAnchor table--from
853 * beginning of CursivePos
854 * subtable--may be NULL */
856 exitAnchor; /* Offset to ExitAnchor table--from
857 * beginning of CursivePos
858 * subtable--may be NULL */
860 DEFINE_SIZE_STATIC (4);
863 struct CursivePosFormat1
865 inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
867 TRACE_COLLECT_GLYPHS (this);
868 (this+coverage).add_coverage (c->input);
871 inline const Coverage &get_coverage (void) const
873 return this+coverage;
876 inline bool apply (hb_apply_context_t *c) const
880 /* We don't handle mark glyphs here. */
881 if (c->property & HB_OT_LAYOUT_GLYPH_PROPS_MARK) return TRACE_RETURN (false);
883 hb_apply_context_t::mark_skipping_forward_iterator_t skippy_iter (c, c->buffer->idx, 1);
884 if (skippy_iter.has_no_chance ()) return TRACE_RETURN (false);
886 const EntryExitRecord &this_record = entryExitRecord[(this+coverage).get_coverage (c->buffer->cur().codepoint)];
887 if (!this_record.exitAnchor) return TRACE_RETURN (false);
889 if (!skippy_iter.next ()) return TRACE_RETURN (false);
891 const EntryExitRecord &next_record = entryExitRecord[(this+coverage).get_coverage (c->buffer->info[skippy_iter.idx].codepoint)];
892 if (!next_record.entryAnchor) return TRACE_RETURN (false);
894 unsigned int i = c->buffer->idx;
895 unsigned int j = skippy_iter.idx;
897 hb_position_t entry_x, entry_y, exit_x, exit_y;
898 (this+this_record.exitAnchor).get_anchor (c->font, c->buffer->info[i].codepoint, &exit_x, &exit_y);
899 (this+next_record.entryAnchor).get_anchor (c->font, c->buffer->info[j].codepoint, &entry_x, &entry_y);
901 hb_glyph_position_t *pos = c->buffer->pos;
904 /* Main-direction adjustment */
905 switch (c->direction) {
906 case HB_DIRECTION_LTR:
907 pos[i].x_advance = exit_x + pos[i].x_offset;
909 d = entry_x + pos[j].x_offset;
910 pos[j].x_advance -= d;
911 pos[j].x_offset -= d;
913 case HB_DIRECTION_RTL:
914 d = exit_x + pos[i].x_offset;
915 pos[i].x_advance -= d;
916 pos[i].x_offset -= d;
918 pos[j].x_advance = entry_x + pos[j].x_offset;
920 case HB_DIRECTION_TTB:
921 pos[i].y_advance = exit_y + pos[i].y_offset;
923 d = entry_y + pos[j].y_offset;
924 pos[j].y_advance -= d;
925 pos[j].y_offset -= d;
927 case HB_DIRECTION_BTT:
928 d = exit_y + pos[i].y_offset;
929 pos[i].y_advance -= d;
930 pos[i].y_offset -= d;
932 pos[j].y_advance = entry_y;
934 case HB_DIRECTION_INVALID:
939 /* Cross-direction adjustment */
940 if (c->lookup_props & LookupFlag::RightToLeft) {
941 pos[i].cursive_chain() = j - i;
942 if (likely (HB_DIRECTION_IS_HORIZONTAL (c->direction)))
943 pos[i].y_offset = entry_y - exit_y;
945 pos[i].x_offset = entry_x - exit_x;
947 pos[j].cursive_chain() = i - j;
948 if (likely (HB_DIRECTION_IS_HORIZONTAL (c->direction)))
949 pos[j].y_offset = exit_y - entry_y;
951 pos[j].x_offset = exit_x - entry_x;
955 return TRACE_RETURN (true);
958 inline bool sanitize (hb_sanitize_context_t *c) {
959 TRACE_SANITIZE (this);
960 return TRACE_RETURN (coverage.sanitize (c, this) && entryExitRecord.sanitize (c, this));
964 USHORT format; /* Format identifier--format = 1 */
966 coverage; /* Offset to Coverage table--from
967 * beginning of subtable */
968 ArrayOf<EntryExitRecord>
969 entryExitRecord; /* Array of EntryExit records--in
970 * Coverage Index order */
972 DEFINE_SIZE_ARRAY (6, entryExitRecord);
977 template <typename context_t>
978 inline typename context_t::return_t process (context_t *c) const
980 TRACE_PROCESS (this);
982 case 1: return TRACE_RETURN (c->process (u.format1));
983 default:return TRACE_RETURN (c->default_return_value ());
987 inline bool sanitize (hb_sanitize_context_t *c) {
988 TRACE_SANITIZE (this);
989 if (!u.format.sanitize (c)) return TRACE_RETURN (false);
991 case 1: return TRACE_RETURN (u.format1.sanitize (c));
992 default:return TRACE_RETURN (true);
998 USHORT format; /* Format identifier */
999 CursivePosFormat1 format1;
1004 typedef AnchorMatrix BaseArray; /* base-major--
1005 * in order of BaseCoverage Index--,
1007 * ordered by class--zero-based. */
1009 struct MarkBasePosFormat1
1011 inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
1013 TRACE_COLLECT_GLYPHS (this);
1014 (this+markCoverage).add_coverage (c->input);
1015 (this+baseCoverage).add_coverage (c->input);
1016 /* TODO only add combinations that have nonzero adjustment. */
/* Interior of MarkBasePosFormat1 (GPOS lookup type 4: attach a mark to a base).
 * NOTE(review): this excerpt is missing interleaved lines (braces, loop/field
 * headers); comments describe only the statements visible here. */
1019 inline const Coverage &get_coverage (void) const
1021 return this+markCoverage;
/* Try to position the current glyph (a mark) against a preceding base glyph. */
1024 inline bool apply (hb_apply_context_t *c) const
1027 unsigned int mark_index = (this+markCoverage).get_coverage (c->buffer->cur().codepoint);
/* likely(): most glyphs are not covered, so the miss is the hot path. */
1028 if (likely (mark_index == NOT_COVERED)) return TRACE_RETURN (false);
1030 /* now we search backwards for a non-mark glyph */
1031 unsigned int property;
1032 hb_apply_context_t::mark_skipping_backward_iterator_t skippy_iter (c, c->buffer->idx, 1);
1034 if (!skippy_iter.prev (&property, LookupFlag::IgnoreMarks)) return TRACE_RETURN (false);
1035 /* We only want to attach to the first of a MultipleSubst sequence. Reject others. */
/* break/reject pair: presumably inside a scan loop whose header is not in this
 * excerpt — accept a glyph with lig_comp == 0, otherwise keep scanning back. */
1036 if (0 == get_lig_comp (c->buffer->info[skippy_iter.idx])) break;
1037 skippy_iter.reject ();
1040 /* The following assertion is too strong, so we've disabled it. */
1041 if (!(property & HB_OT_LAYOUT_GLYPH_PROPS_BASE_GLYPH)) {/*return TRACE_RETURN (false);*/}
1043 unsigned int base_index = (this+baseCoverage).get_coverage (c->buffer->info[skippy_iter.idx].codepoint);
1044 if (base_index == NOT_COVERED) return TRACE_RETURN (false);
/* Delegate the actual anchor-based attachment to MarkArray::apply. */
1046 return TRACE_RETURN ((this+markArray).apply (c, mark_index, base_index, this+baseArray, classCount, skippy_iter.idx));
/* Bounds-check all offsets; baseArray needs classCount columns per row. */
1049 inline bool sanitize (hb_sanitize_context_t *c) {
1050 TRACE_SANITIZE (this);
1051 return TRACE_RETURN (c->check_struct (this) && markCoverage.sanitize (c, this) && baseCoverage.sanitize (c, this) &&
1052 markArray.sanitize (c, this) && baseArray.sanitize (c, this, (unsigned int) classCount));
1056 USHORT format; /* Format identifier--format = 1 */
1058 markCoverage; /* Offset to MarkCoverage table--from
1059 * beginning of MarkBasePos subtable */
1061 baseCoverage; /* Offset to BaseCoverage table--from
1062 * beginning of MarkBasePos subtable */
1063 USHORT classCount; /* Number of classes defined for marks */
1065 markArray; /* Offset to MarkArray table--from
1066 * beginning of MarkBasePos subtable */
1068 baseArray; /* Offset to BaseArray table--from
1069 * beginning of MarkBasePos subtable */
1071 DEFINE_SIZE_STATIC (12);
/* MarkBasePos: format-dispatching wrapper around MarkBasePosFormat1.
 * NOTE(review): the `switch (u.format)` headers are among the lines missing
 * from this excerpt; only the case bodies are visible. */
1076 template <typename context_t>
1077 inline typename context_t::return_t process (context_t *c) const
1079 TRACE_PROCESS (this);
1081 case 1: return TRACE_RETURN (c->process (u.format1));
1082 default:return TRACE_RETURN (c->default_return_value ());
/* Validate the format tag first, then the selected union arm. */
1086 inline bool sanitize (hb_sanitize_context_t *c) {
1087 TRACE_SANITIZE (this);
1088 if (!u.format.sanitize (c)) return TRACE_RETURN (false);
1090 case 1: return TRACE_RETURN (u.format1.sanitize (c));
1091 default:return TRACE_RETURN (true);
1097 USHORT format; /* Format identifier */
1098 MarkBasePosFormat1 format1;
/* Per-ligature anchor matrix: one row per ligature component, one column per
 * mark class. */
1103 typedef AnchorMatrix LigatureAttach; /* component-major--
1104 * in order of writing direction--,
1106 * ordered by class--zero-based. */
1108 typedef OffsetListOf<LigatureAttach> LigatureArray;
1109 /* Array of LigatureAttach
1111 * LigatureCoverage Index */
/* MarkLigPosFormat1 (GPOS lookup type 5): attach a mark to a specific
 * component of a preceding ligature.
 * NOTE(review): this excerpt is missing interleaved lines (braces, field type
 * lines); comments describe only the statements visible here. */
1113 struct MarkLigPosFormat1
/* Closure support: report every glyph this subtable can touch. */
1115 inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
1117 TRACE_COLLECT_GLYPHS (this);
1118 (this+markCoverage).add_coverage (c->input);
1119 (this+ligatureCoverage).add_coverage (c->input);
1120 /* TODO only add combinations that have nonzero adjustment. */
1123 inline const Coverage &get_coverage (void) const
1125 return this+markCoverage;
1128 inline bool apply (hb_apply_context_t *c) const
1131 unsigned int mark_index = (this+markCoverage).get_coverage (c->buffer->cur().codepoint);
/* likely(): most glyphs are not covered, so the miss is the hot path. */
1132 if (likely (mark_index == NOT_COVERED)) return TRACE_RETURN (false);
1134 /* now we search backwards for a non-mark glyph */
1135 unsigned int property;
1136 hb_apply_context_t::mark_skipping_backward_iterator_t skippy_iter (c, c->buffer->idx, 1);
1137 if (!skippy_iter.prev (&property, LookupFlag::IgnoreMarks)) return TRACE_RETURN (false);
1139 /* The following assertion is too strong, so we've disabled it. */
1140 if (!(property & HB_OT_LAYOUT_GLYPH_PROPS_LIGATURE)) {/*return TRACE_RETURN (false);*/}
1142 unsigned int j = skippy_iter.idx;
1143 unsigned int lig_index = (this+ligatureCoverage).get_coverage (c->buffer->info[j].codepoint);
1144 if (lig_index == NOT_COVERED) return TRACE_RETURN (false);
1146 const LigatureArray& lig_array = this+ligatureArray;
1147 const LigatureAttach& lig_attach = lig_array[lig_index];
1149 /* Find component to attach to */
1150 unsigned int comp_count = lig_attach.rows;
/* A ligature with zero anchor rows has nothing to attach to. */
1151 if (unlikely (!comp_count)) return TRACE_RETURN (false);
1153 /* We must now check whether the ligature ID of the current mark glyph
1154 * is identical to the ligature ID of the found ligature. If yes, we
1155 * can directly use the component index. If not, we attach the mark
1156 * glyph to the last component of the ligature. */
1157 unsigned int comp_index;
1158 unsigned int lig_id = get_lig_id (c->buffer->info[j]);
1159 unsigned int mark_id = get_lig_id (c->buffer->cur());
1160 unsigned int mark_comp = get_lig_comp (c->buffer->cur());
/* Clamp the mark's 1-based component index into the attach matrix. */
1161 if (lig_id && lig_id == mark_id && mark_comp > 0)
1162 comp_index = MIN (comp_count, get_lig_comp (c->buffer->cur())) - 1;
1164 comp_index = comp_count - 1;
1166 return TRACE_RETURN ((this+markArray).apply (c, mark_index, comp_index, lig_attach, classCount, j));
/* Bounds-check all offsets; ligatureArray rows need classCount columns. */
1169 inline bool sanitize (hb_sanitize_context_t *c) {
1170 TRACE_SANITIZE (this);
1171 return TRACE_RETURN (c->check_struct (this) && markCoverage.sanitize (c, this) && ligatureCoverage.sanitize (c, this) &&
1172 markArray.sanitize (c, this) && ligatureArray.sanitize (c, this, (unsigned int) classCount));
1176 USHORT format; /* Format identifier--format = 1 */
1178 markCoverage; /* Offset to Mark Coverage table--from
1179 * beginning of MarkLigPos subtable */
1181 ligatureCoverage; /* Offset to Ligature Coverage
1182 * table--from beginning of MarkLigPos
1184 USHORT classCount; /* Number of defined mark classes */
1186 markArray; /* Offset to MarkArray table--from
1187 * beginning of MarkLigPos subtable */
1188 OffsetTo<LigatureArray>
1189 ligatureArray; /* Offset to LigatureArray table--from
1190 * beginning of MarkLigPos subtable */
1192 DEFINE_SIZE_STATIC (12);
/* MarkLigPos: format-dispatching wrapper around MarkLigPosFormat1.
 * NOTE(review): the `switch (u.format)` headers are among the lines missing
 * from this excerpt; only the case bodies are visible. */
1197 template <typename context_t>
1198 inline typename context_t::return_t process (context_t *c) const
1200 TRACE_PROCESS (this);
1202 case 1: return TRACE_RETURN (c->process (u.format1));
1203 default:return TRACE_RETURN (c->default_return_value ());
/* Validate the format tag first, then the selected union arm. */
1207 inline bool sanitize (hb_sanitize_context_t *c) {
1208 TRACE_SANITIZE (this);
1209 if (!u.format.sanitize (c)) return TRACE_RETURN (false);
1211 case 1: return TRACE_RETURN (u.format1.sanitize (c));
1212 default:return TRACE_RETURN (true);
1218 USHORT format; /* Format identifier */
1219 MarkLigPosFormat1 format1;
/* Anchor matrix for mark-to-mark: one row per mark2 glyph, one column per
 * mark1 class. */
1224 typedef AnchorMatrix Mark2Array; /* mark2-major--
1225 * in order of Mark2Coverage Index--,
1227 * ordered by class--zero-based. */
/* MarkMarkPosFormat1 (GPOS lookup type 6): attach a mark (mark1) to a
 * preceding mark (mark2) when they belong to the same base or ligature
 * component.
 * NOTE(review): this excerpt is missing interleaved lines (braces, field type
 * lines); comments describe only the statements visible here. */
1229 struct MarkMarkPosFormat1
/* Closure support: report every glyph this subtable can touch. */
1231 inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
1233 TRACE_COLLECT_GLYPHS (this);
1234 (this+mark1Coverage).add_coverage (c->input);
1235 (this+mark2Coverage).add_coverage (c->input);
1236 /* TODO only add combinations that have nonzero adjustment. */
1239 inline const Coverage &get_coverage (void) const
1241 return this+mark1Coverage;
1244 inline bool apply (hb_apply_context_t *c) const
1247 unsigned int mark1_index = (this+mark1Coverage).get_coverage (c->buffer->cur().codepoint);
/* likely(): most glyphs are not covered, so the miss is the hot path. */
1248 if (likely (mark1_index == NOT_COVERED)) return TRACE_RETURN (false);
1250 /* now we search backwards for a suitable mark glyph until a non-mark glyph */
1251 unsigned int property;
1252 hb_apply_context_t::mark_skipping_backward_iterator_t skippy_iter (c, c->buffer->idx, 1);
1253 if (!skippy_iter.prev (&property)) return TRACE_RETURN (false);
/* The previous glyph must itself be a mark. */
1255 if (!(property & HB_OT_LAYOUT_GLYPH_PROPS_MARK)) return TRACE_RETURN (false);
1257 unsigned int j = skippy_iter.idx;
/* Compare ligature id/component of the two marks to decide whether they
 * may attach to each other. */
1259 unsigned int id1 = get_lig_id (c->buffer->cur());
1260 unsigned int id2 = get_lig_id (c->buffer->info[j]);
1261 unsigned int comp1 = get_lig_comp (c->buffer->cur());
1262 unsigned int comp2 = get_lig_comp (c->buffer->info[j]);
1264 if (likely (id1 == id2)) {
1265 if (id1 == 0) /* Marks belonging to the same base. */
1267 else if (comp1 == comp2) /* Marks belonging to the same ligature component. */
1270 /* If ligature ids don't match, it may be the case that one of the marks
1271 * itself is a ligature. In which case match. */
1272 if ((id1 > 0 && !comp1) || (id2 > 0 && !comp2))
1277 return TRACE_RETURN (false);
1280 unsigned int mark2_index = (this+mark2Coverage).get_coverage (c->buffer->info[j].codepoint);
1281 if (mark2_index == NOT_COVERED) return TRACE_RETURN (false);
1283 return TRACE_RETURN ((this+mark1Array).apply (c, mark1_index, mark2_index, this+mark2Array, classCount, j));
/* Bounds-check all offsets; mark2Array rows need classCount columns. */
1286 inline bool sanitize (hb_sanitize_context_t *c) {
1287 TRACE_SANITIZE (this);
1288 return TRACE_RETURN (c->check_struct (this) && mark1Coverage.sanitize (c, this) &&
1289 mark2Coverage.sanitize (c, this) && mark1Array.sanitize (c, this)
1290 && mark2Array.sanitize (c, this, (unsigned int) classCount));
1294 USHORT format; /* Format identifier--format = 1 */
1296 mark1Coverage; /* Offset to Combining Mark1 Coverage
1297 * table--from beginning of MarkMarkPos
1300 mark2Coverage; /* Offset to Combining Mark2 Coverage
1301 * table--from beginning of MarkMarkPos
1303 USHORT classCount; /* Number of defined mark classes */
1305 mark1Array; /* Offset to Mark1Array table--from
1306 * beginning of MarkMarkPos subtable */
1307 OffsetTo<Mark2Array>
1308 mark2Array; /* Offset to Mark2Array table--from
1309 * beginning of MarkMarkPos subtable */
1311 DEFINE_SIZE_STATIC (12);
/* MarkMarkPos: format-dispatching wrapper around MarkMarkPosFormat1.
 * NOTE(review): the `switch (u.format)` headers are among the lines missing
 * from this excerpt; only the case bodies are visible. */
1316 template <typename context_t>
1317 inline typename context_t::return_t process (context_t *c) const
1319 TRACE_PROCESS (this);
1321 case 1: return TRACE_RETURN (c->process (u.format1));
1322 default:return TRACE_RETURN (c->default_return_value ());
/* Validate the format tag first, then the selected union arm. */
1326 inline bool sanitize (hb_sanitize_context_t *c) {
1327 TRACE_SANITIZE (this);
1328 if (!u.format.sanitize (c)) return TRACE_RETURN (false);
1330 case 1: return TRACE_RETURN (u.format1.sanitize (c));
1331 default:return TRACE_RETURN (true);
1337 USHORT format; /* Format identifier */
1338 MarkMarkPosFormat1 format1;
/* Contextual lookups reuse the shared GSUB/GPOS machinery unchanged. */
1343 struct ContextPos : Context {};
1345 struct ChainContextPos : ChainContext {};
/* Extension (type 9): 32-bit offset wrapper; CRTP gives Extension access to
 * PosLookupSubTable for the wrapped subtable. */
1347 struct ExtensionPos : Extension<ExtensionPos>
1349 typedef struct PosLookupSubTable LookupSubTable;
/* Union of all GPOS subtable types, dispatched on the lookup type that is
 * stored in the parent PosLookup (not in the subtable itself).
 * NOTE(review): the lookup-type enum and several union members are among the
 * lines missing from this excerpt. */
1359 struct PosLookupSubTable
1361 friend struct PosLookup;
/* Dispatch a context (apply/collect/sanitize-like) to the active member. */
1375 template <typename context_t>
1376 inline typename context_t::return_t process (context_t *c, unsigned int lookup_type) const
1378 TRACE_PROCESS (this);
1379 switch (lookup_type) {
1380 case Single: return TRACE_RETURN (u.single.process (c));
1381 case Pair: return TRACE_RETURN (u.pair.process (c));
1382 case Cursive: return TRACE_RETURN (u.cursive.process (c));
1383 case MarkBase: return TRACE_RETURN (u.markBase.process (c));
1384 case MarkLig: return TRACE_RETURN (u.markLig.process (c));
1385 case MarkMark: return TRACE_RETURN (u.markMark.process (c));
1386 case Context: return TRACE_RETURN (u.context.process (c));
1387 case ChainContext: return TRACE_RETURN (u.chainContext.process (c));
1388 case Extension: return TRACE_RETURN (u.extension.process (c));
/* Unknown lookup types are ignored rather than rejected. */
1389 default: return TRACE_RETURN (c->default_return_value ());
/* Check the common sub-format header, then the member selected by type. */
1393 inline bool sanitize (hb_sanitize_context_t *c, unsigned int lookup_type) {
1394 TRACE_SANITIZE (this);
1395 if (!u.header.sub_format.sanitize (c))
1396 return TRACE_RETURN (false);
1397 switch (lookup_type) {
1398 case Single: return TRACE_RETURN (u.single.sanitize (c));
1399 case Pair: return TRACE_RETURN (u.pair.sanitize (c));
1400 case Cursive: return TRACE_RETURN (u.cursive.sanitize (c));
1401 case MarkBase: return TRACE_RETURN (u.markBase.sanitize (c));
1402 case MarkLig: return TRACE_RETURN (u.markLig.sanitize (c));
1403 case MarkMark: return TRACE_RETURN (u.markMark.sanitize (c));
1404 case Context: return TRACE_RETURN (u.context.sanitize (c));
1405 case ChainContext: return TRACE_RETURN (u.chainContext.sanitize (c));
1406 case Extension: return TRACE_RETURN (u.extension.sanitize (c));
1407 default: return TRACE_RETURN (true);
1419 MarkBasePos markBase;
1421 MarkMarkPos markMark;
1423 ChainContextPos chainContext;
1424 ExtensionPos extension;
1427 DEFINE_SIZE_UNION (2, header.sub_format);
/* A GPOS lookup: an array of subtables sharing one lookup type and flags.
 * NOTE(review): this excerpt is missing interleaved lines (braces, the body
 * of apply_string's main loop); comments describe only the visible code. */
1431 struct PosLookup : Lookup
1433 inline const PosLookupSubTable& get_subtable (unsigned int i) const
1434 { return this+CastR<OffsetArrayOf<PosLookupSubTable> > (subTable)[i]; }
/* Run a context over every subtable until it asks to stop. */
1436 template <typename context_t>
1437 inline typename context_t::return_t process (context_t *c) const
1439 TRACE_PROCESS (this);
1440 unsigned int lookup_type = get_type ();
1441 unsigned int count = get_subtable_count ();
1442 for (unsigned int i = 0; i < count; i++) {
1443 typename context_t::return_t r = get_subtable (i).process (c, lookup_type);
1444 if (c->stop_sublookup_iteration (r))
1445 return TRACE_RETURN (r);
1447 return TRACE_RETURN (c->default_return_value ());
/* Defined out-of-class below, once GPOS is visible. */
1449 template <typename context_t>
1450 static inline typename context_t::return_t process_recurse_func (context_t *c, unsigned int lookup_index);
1452 inline hb_collect_glyphs_context_t::return_t collect_glyphs_lookup (hb_collect_glyphs_context_t *c) const
1454 TRACE_COLLECT_GLYPHS (this);
/* No recursion during glyph collection. */
1455 c->set_recurse_func (NULL);
1456 return TRACE_RETURN (process (c));
/* Union the coverage of all subtables into `glyphs`, skipping a repeated
 * Coverage pointer (consecutive subtables often share one). */
1459 template <typename set_t>
1460 inline void add_coverage (set_t *glyphs) const
1462 hb_get_coverage_context_t c;
1463 const Coverage *last = NULL;
1464 unsigned int count = get_subtable_count ();
1465 for (unsigned int i = 0; i < count; i++) {
1466 const Coverage *coverage = &get_subtable (i).process (&c, get_type ());
1467 if (coverage != last) {
1468 coverage->add_coverage (glyphs);
/* Apply this lookup at the buffer's current position only. */
1474 inline bool apply_once (hb_apply_context_t *c) const
1477 if (!c->check_glyph_property (&c->buffer->cur(), c->lookup_props, &c->property))
1478 return TRACE_RETURN (false);
1479 return TRACE_RETURN (process (c));
1482 static bool apply_recurse_func (hb_apply_context_t *c, unsigned int lookup_index);
/* Apply across the whole buffer; `digest` is a cheap pre-filter to skip
 * glyphs this lookup can never match. */
1484 inline bool apply_string (hb_apply_context_t *c, const hb_set_digest_t *digest) const
1488 if (unlikely (!c->buffer->len || !c->lookup_mask))
1491 c->set_recurse_func (apply_recurse_func);
1492 c->set_lookup (*this);
1496 while (c->buffer->idx < c->buffer->len)
1498 if ((c->buffer->cur().mask & c->lookup_mask) &&
1499 digest->may_have (c->buffer->cur().codepoint) &&
/* Sanitize the base Lookup, then every subtable against our lookup type. */
1509 inline bool sanitize (hb_sanitize_context_t *c) {
1510 TRACE_SANITIZE (this);
1511 if (unlikely (!Lookup::sanitize (c))) return TRACE_RETURN (false);
1512 OffsetArrayOf<PosLookupSubTable> &list = CastR<OffsetArrayOf<PosLookupSubTable> > (subTable);
1513 return TRACE_RETURN (list.sanitize (c, this, get_type ()));
1517 typedef OffsetListOf<PosLookup> PosLookupList;
1520 * GPOS -- The Glyph Positioning Table
/* The GPOS table itself: shared GSUBGPOS header plus PosLookup list.
 * Fix: the position_finish declaration spelled its last parameter
 * "zero_width_attahced_marks"; renamed to match the out-of-line definition's
 * "zero_width_attached_marks" (declaration-only name, behavior-neutral). */
1523 struct GPOS : GSUBGPOS
1525 static const hb_tag_t Tag = HB_OT_TAG_GPOS;
/* Downcast the generic lookup to a PosLookup. */
1527 inline const PosLookup& get_lookup (unsigned int i) const
1528 { return CastR<PosLookup> (GSUBGPOS::get_lookup (i)); }
/* Buffer setup/teardown around positioning; defined out-of-class below. */
1530 static inline void position_start (hb_font_t *font, hb_buffer_t *buffer);
1531 static inline void position_finish (hb_font_t *font, hb_buffer_t *buffer, hb_bool_t zero_width_attached_marks);
/* Sanitize the shared header, then the GPOS-typed lookup list. */
1533 inline bool sanitize (hb_sanitize_context_t *c) {
1534 TRACE_SANITIZE (this);
1535 if (unlikely (!GSUBGPOS::sanitize (c))) return TRACE_RETURN (false);
1536 OffsetTo<PosLookupList> &list = CastR<OffsetTo<PosLookupList> > (lookupList);
1537 return TRACE_RETURN (list.sanitize (c, this));
1540 DEFINE_SIZE_STATIC (10);
/* Recursively resolve cursive-attachment chains: propagate the parent's
 * cross-axis offset to glyph i (y in horizontal layout, x in vertical).
 * NOTE(review): lines between these (return type, guards, presumably the
 * conversion of the relative cursive_chain() value into absolute index j)
 * are missing from this excerpt. */
1545 fix_cursive_minor_offset (hb_glyph_position_t *pos, unsigned int i, hb_direction_t direction)
1547 unsigned int j = pos[i].cursive_chain();
/* Clear the link before recursing, so a malformed cycle cannot loop. */
1553 pos[i].cursive_chain() = 0;
/* Fix the parent first so its offset is final when we add it below. */
1555 fix_cursive_minor_offset (pos, j, direction);
1557 if (HB_DIRECTION_IS_HORIZONTAL (direction))
1558 pos[i].y_offset += pos[j].y_offset;
1560 pos[i].x_offset += pos[j].x_offset;
/* Convert a mark's attach_lookback() link into final offsets: snap the mark
 * onto its base glyph j and cancel the advances of the glyphs in between.
 * NOTE(review): the return type line and some closing braces are missing
 * from this excerpt. */
1564 fix_mark_attachment (hb_glyph_position_t *pos, unsigned int i, hb_direction_t direction, hb_bool_t zero_width_attached_marks)
/* Fast path: most glyphs are not attached marks. */
1566 if (likely (!(pos[i].attach_lookback())))
1569 unsigned int j = i - pos[i].attach_lookback();
1571 if (zero_width_attached_marks) {
1572 pos[i].x_advance = 0;
1573 pos[i].y_advance = 0;
/* Inherit the base's own placement adjustments. */
1575 pos[i].x_offset += pos[j].x_offset;
1576 pos[i].y_offset += pos[j].y_offset;
/* Forward direction: subtract the advances from base j up to (not
 * including) the mark, so the mark lands back on the base. */
1578 if (HB_DIRECTION_IS_FORWARD (direction))
1579 for (unsigned int k = j; k < i; k++) {
1580 pos[i].x_offset -= pos[k].x_advance;
1581 pos[i].y_offset -= pos[k].y_advance;
/* Backward direction: add the advances of (j, i] instead. */
1584 for (unsigned int k = j + 1; k < i + 1; k++) {
1585 pos[i].x_offset += pos[k].x_advance;
1586 pos[i].y_offset += pos[k].y_advance;
/* Reset all positions and clear the two per-glyph GPOS vars
 * (attach_lookback / cursive_chain) before any lookups run. */
1591 GPOS::position_start (hb_font_t *font HB_UNUSED, hb_buffer_t *buffer)
1593 buffer->clear_positions ();
1595 unsigned int count = buffer->len;
1596 for (unsigned int i = 0; i < count; i++)
1597 buffer->pos[i].attach_lookback() = buffer->pos[i].cursive_chain() = 0;
/* After all GPOS lookups: resolve cursive chains and mark attachments into
 * concrete offsets, then release the borrowed per-glyph buffer vars. */
1601 GPOS::position_finish (hb_font_t *font HB_UNUSED, hb_buffer_t *buffer, hb_bool_t zero_width_attached_marks)
1604 hb_glyph_position_t *pos = hb_buffer_get_glyph_positions (buffer, &len);
1605 hb_direction_t direction = buffer->props.direction;
1607 /* Handle cursive connections */
1608 for (unsigned int i = 0; i < len; i++)
1609 fix_cursive_minor_offset (pos, i, direction);
1611 /* Handle attachments */
1612 for (unsigned int i = 0; i < len; i++)
1613 fix_mark_attachment (pos, i, direction, zero_width_attached_marks);
1615 HB_BUFFER_DEALLOCATE_VAR (buffer, syllable);
1616 HB_BUFFER_DEALLOCATE_VAR (buffer, lig_props);
1617 HB_BUFFER_DEALLOCATE_VAR (buffer, glyph_props);
1621 /* Out-of-class implementation for methods recursing */
/* Recursion hook: look up the nested lookup by index in the face's GPOS
 * table and process it with the same context. */
1623 template <typename context_t>
1624 inline typename context_t::return_t PosLookup::process_recurse_func (context_t *c, unsigned int lookup_index)
1626 const GPOS &gpos = *(hb_ot_layout_from_face (c->face)->gpos);
1627 const PosLookup &l = gpos.get_lookup (lookup_index);
1628 return l.process (c);
/* Recursion hook for apply: run the nested lookup once, saving and
 * restoring the context's lookup_props/property so the outer lookup
 * continues with its own settings. */
1631 inline bool PosLookup::apply_recurse_func (hb_apply_context_t *c, unsigned int lookup_index)
1633 const GPOS &gpos = *(hb_ot_layout_from_face (c->face)->gpos);
1634 const PosLookup &l = gpos.get_lookup (lookup_index);
1635 unsigned int saved_lookup_props = c->lookup_props;
1636 unsigned int saved_property = c->property;
1638 bool ret = l.apply_once (c);
1639 c->lookup_props = saved_lookup_props;
1640 c->property = saved_property;
1645 #undef attach_lookback
1646 #undef cursive_chain
1649 } /* namespace OT */
1652 #endif /* HB_OT_LAYOUT_GPOS_TABLE_HH */