2 * Copyright © 2007,2008,2009,2010 Red Hat, Inc.
3 * Copyright © 2010,2012,2013 Google, Inc.
5 * This is part of HarfBuzz, a text shaping library.
7 * Permission is hereby granted, without written agreement and without
8 * license or royalty fees, to use, copy, modify, and distribute this
9 * software and its documentation for any purpose, provided that the
10 * above copyright notice and the following two paragraphs appear in
11 * all copies of this software.
13 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
14 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
15 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
16 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
19 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
20 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
21 * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
22 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
23 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
25 * Red Hat Author(s): Behdad Esfahbod
26 * Google Author(s): Behdad Esfahbod
29 #ifndef HB_OT_LAYOUT_GPOS_TABLE_HH
30 #define HB_OT_LAYOUT_GPOS_TABLE_HH
32 #include "hb-ot-layout-gsubgpos.hh"
/* Per-glyph buffer "var" allocations used by GPOS attachment bookkeeping.
 * attach_chain() stores a signed glyph index relative to the current glyph;
 * attach_type() selects one of the ATTACH_TYPE_* values below. */
38 /* buffer **position** var allocations */
39 #define attach_chain() var.i16[0] /* glyph to which this attaches to, relative to current glyphs; negative for going back, positive for forward. */
40 #define attach_type() var.u8[2] /* attachment type */
41 /* Note! if attach_chain() is zero, the value of attach_type() is irrelevant. */
/* Attachment kinds stored in attach_type().  (The enum's opening line is
 * elided from this listing.) */
44 ATTACH_TYPE_NONE = 0X00,
46 /* Each attachment should be either a mark or a cursive; can't be both. */
47 ATTACH_TYPE_MARK = 0X01,
48 ATTACH_TYPE_CURSIVE = 0X02,
52 /* Shared Tables: ValueRecord, Anchor Table, and MarkArray */
/* One 16-bit slot of a ValueRecord.  Whether a given slot is a signed
 * design-unit delta or an offset to a Device table is determined by the
 * governing ValueFormat bits, not by the slot itself. */
54 typedef HBUINT16 Value;
/* Unsized run of Value slots; the length is ValueFormat::get_len (). */
56 typedef UnsizedArrayOf<Value> ValueRecord;
/* ValueFormat: a 16-bit flags word describing which fields are present in a
 * ValueRecord and in what order.  The struct *is* the flags value (it
 * inherits from HBUINT16); the HBINT16/OffsetTo member declarations below
 * only document the full ValueRecord layout. */
58 struct ValueFormat : HBUINT16
61 xPlacement = 0x0001u, /* Includes horizontal adjustment for placement */
62 yPlacement = 0x0002u, /* Includes vertical adjustment for placement */
63 xAdvance = 0x0004u, /* Includes horizontal adjustment for advance */
64 yAdvance = 0x0008u, /* Includes vertical adjustment for advance */
65 xPlaDevice = 0x0010u, /* Includes horizontal Device table for placement */
66 yPlaDevice = 0x0020u, /* Includes vertical Device table for placement */
67 xAdvDevice = 0x0040u, /* Includes horizontal Device table for advance */
68 yAdvDevice = 0x0080u, /* Includes vertical Device table for advance */
69 ignored = 0x0F00u, /* Was used in TrueType Open for MM fonts */
70 reserved = 0xF000u, /* For future use */
72 devices = 0x00F0u /* Mask for having any Device table */
75 /* All fields are options. Only those available advance the value pointer. */
77 HBINT16 xPlacement; /* Horizontal adjustment for
78 * placement--in design units */
79 HBINT16 yPlacement; /* Vertical adjustment for
80 * placement--in design units */
81 HBINT16 xAdvance; /* Horizontal adjustment for
82 * advance--in design units (only used
83 * for horizontal writing) */
84 HBINT16 yAdvance; /* Vertical adjustment for advance--in
85 * design units (only used for vertical
87 OffsetTo<Device> xPlaDevice; /* Offset to Device table for
88 * horizontal placement--measured from
89 * beginning of PosTable (may be NULL) */
90 OffsetTo<Device> yPlaDevice; /* Offset to Device table for vertical
91 * placement--measured from beginning
92 * of PosTable (may be NULL) */
93 OffsetTo<Device> xAdvDevice; /* Offset to Device table for
94 * horizontal advance--measured from
95 * beginning of PosTable (may be NULL) */
96 OffsetTo<Device> yAdvDevice; /* Offset to Device table for vertical
97 * advance--measured from beginning of
98 * PosTable (may be NULL) */
/* Number of Value slots implied by the set flag bits, and their byte size. */
101 unsigned int get_len () const { return hb_popcount ((unsigned int) *this); }
102 unsigned int get_size () const { return get_len () * Value::static_size; }
/* Apply the ValueRecord at `values` to `glyph_pos`.  Only the fields whose
 * flag bits are set are read; placement adjusts x/y offsets, advance
 * adjusts the advance in the dominant direction only.  Device/Variation
 * deltas are applied only when the font is ppem-scaled or has variation
 * coords.  NOTE(review): the declaration of `ret` (and some parameters)
 * falls on lines elided from this listing; from the get_short/get_device
 * helpers it accumulates "any applied value was non-zero". */
104 bool apply_value (hb_ot_apply_context_t *c,
107 hb_glyph_position_t &glyph_pos) const
110 unsigned int format = *this;
111 if (!format) return ret;
113 hb_font_t *font = c->font;
114 bool horizontal = HB_DIRECTION_IS_HORIZONTAL (c->direction);
116 if (format & xPlacement) glyph_pos.x_offset += font->em_scale_x (get_short (values++, &ret));
117 if (format & yPlacement) glyph_pos.y_offset += font->em_scale_y (get_short (values++, &ret));
118 if (format & xAdvance) {
119 if (likely (horizontal)) glyph_pos.x_advance += font->em_scale_x (get_short (values, &ret));
122 /* y_advance values grow downward but font-space grows upward, hence negation */
123 if (format & yAdvance) {
124 if (unlikely (!horizontal)) glyph_pos.y_advance -= font->em_scale_y (get_short (values, &ret));
/* Device deltas below are skipped entirely when neither hinting (ppem)
 * nor variations are in effect. */
128 if (!has_device ()) return ret;
130 bool use_x_device = font->x_ppem || font->num_coords;
131 bool use_y_device = font->y_ppem || font->num_coords;
133 if (!use_x_device && !use_y_device) return ret;
135 const VariationStore &store = c->var_store;
137 /* pixel -> fractional pixel */
138 if (format & xPlaDevice) {
139 if (use_x_device) glyph_pos.x_offset += (base + get_device (values, &ret)).get_x_delta (font, store);
142 if (format & yPlaDevice) {
143 if (use_y_device) glyph_pos.y_offset += (base + get_device (values, &ret)).get_y_delta (font, store);
146 if (format & xAdvDevice) {
147 if (horizontal && use_x_device) glyph_pos.x_advance += (base + get_device (values, &ret)).get_x_delta (font, store);
150 if (format & yAdvDevice) {
151 /* y_advance values grow downward but font-space grows upward, hence negation */
152 if (!horizontal && use_y_device) glyph_pos.y_advance -= (base + get_device (values, &ret)).get_y_delta (font, store);
/* Sanitize the Device tables referenced by one ValueRecord.  Skips over
 * the plain (non-offset) fields first, then checks each present offset. */
159 bool sanitize_value_devices (hb_sanitize_context_t *c, const void *base, const Value *values) const
161 unsigned int format = *this;
163 if (format & xPlacement) values++;
164 if (format & yPlacement) values++;
165 if (format & xAdvance) values++;
166 if (format & yAdvance) values++;
168 if ((format & xPlaDevice) && !get_device (values++).sanitize (c, base)) return false;
169 if ((format & yPlaDevice) && !get_device (values++).sanitize (c, base)) return false;
170 if ((format & xAdvDevice) && !get_device (values++).sanitize (c, base)) return false;
171 if ((format & yAdvDevice) && !get_device (values++).sanitize (c, base)) return false;
/* Reinterpret a Value slot as an Offset-to-Device, or as a signed short.
 * The optional `worked` out-flag ORs in whether the raw slot was non-zero,
 * which is how apply_value() learns that something was actually applied. */
176 HB_INTERNAL static OffsetTo<Device>& get_device (Value* value)
177 { return *CastP<OffsetTo<Device>> (value); }
178 HB_INTERNAL static const OffsetTo<Device>& get_device (const Value* value, bool *worked=nullptr)
180 if (worked) *worked |= bool (*value);
181 return *CastP<OffsetTo<Device>> (value);
184 HB_INTERNAL static const HBINT16& get_short (const Value* value, bool *worked=nullptr)
186 if (worked) *worked |= bool (*value);
187 return *CastP<HBINT16> (value);
/* True if any Device-table flag bit is set. */
192 bool has_device () const
194 unsigned int format = *this;
195 return (format & devices) != 0;
/* Sanitize a single ValueRecord (range check plus its Device tables). */
198 bool sanitize_value (hb_sanitize_context_t *c, const void *base, const Value *values) const
200 TRACE_SANITIZE (this);
201 return_trace (c->check_range (values, get_size ()) && (!has_device () || sanitize_value_devices (c, base, values)))
204 bool sanitize_values (hb_sanitize_context_t *c, const void *base, const Value *values, unsigned int count) const
206 TRACE_SANITIZE (this);
207 unsigned int len = get_len ();
209 if (!c->check_range (values, count, get_size ())) return_trace (false);
211 if (!has_device ()) return_trace (true);
213 for (unsigned int i = 0; i < count; i++) {
214 if (!sanitize_value_devices (c, base, values))
215 return_trace (false);
222 /* Just sanitize referenced Device tables. Doesn't check the values themselves. */
223 bool sanitize_values_stride_unsafe (hb_sanitize_context_t *c, const void *base, const Value *values, unsigned int count, unsigned int stride) const
225 TRACE_SANITIZE (this);
227 if (!has_device ()) return_trace (true);
229 for (unsigned int i = 0; i < count; i++) {
230 if (!sanitize_value_devices (c, base, values))
231 return_trace (false);
/* Forward declaration: serialize a SinglePos subtable (format chosen
 * automatically) from an iterator of (glyph, value-array) pairs.
 * Defined after the SinglePos union below. */
239 template<typename Iterator>
240 static inline void SinglePos_serialize (hb_serialize_context_t *c,
242 ValueFormat valFormat);
/* AnchorFormat1: a bare design-unit attachment point; no device tables and
 * no contour-point snapping.  (The `struct AnchorFormat1` opening line is
 * elided from this listing.) */
/* Return the anchor position in font units (scaled floats); glyph is unused. */
247 void get_anchor (hb_ot_apply_context_t *c, hb_codepoint_t glyph_id HB_UNUSED,
248 float *x, float *y) const
250 hb_font_t *font = c->font;
251 *x = font->em_fscale_x (xCoordinate);
252 *y = font->em_fscale_y (yCoordinate);
/* Fixed-size struct: a plain memory-bounds check suffices. */
255 bool sanitize (hb_sanitize_context_t *c) const
257 TRACE_SANITIZE (this);
258 return_trace (c->check_struct (this));
/* Byte-copy into the serializer (no internal offsets to fix up). */
261 AnchorFormat1* copy (hb_serialize_context_t *c) const
263 TRACE_SERIALIZE (this);
264 return_trace (c->embed<AnchorFormat1> (this));
268 HBUINT16 format; /* Format identifier--format = 1 */
269 FWORD xCoordinate; /* Horizontal value--in design units */
270 FWORD yCoordinate; /* Vertical value--in design units */
272 DEFINE_SIZE_STATIC (6);
/* AnchorFormat2: design-unit point plus a glyph contour-point index that,
 * when hinting is active, overrides the coordinate with the hinted outline
 * point.  (Struct opener and some preprocessor structure are elided from
 * this listing; the duplicated assignments below presumably sit in separate
 * compile-time branches -- confirm against the full source.) */
277 void get_anchor (hb_ot_apply_context_t *c, hb_codepoint_t glyph_id,
278 float *x, float *y) const
280 hb_font_t *font = c->font;
283 *x = font->em_fscale_x (xCoordinate);
284 *y = font->em_fscale_y (yCoordinate);
288 unsigned int x_ppem = font->x_ppem;
289 unsigned int y_ppem = font->y_ppem;
290 hb_position_t cx = 0, cy = 0;
/* Only consult the contour point when some ppem scaling is in effect;
 * NOTE(review): `ret` is declared on an elided line. */
293 ret = (x_ppem || y_ppem) &&
294 font->get_glyph_contour_point_for_origin (glyph_id, anchorPoint, HB_DIRECTION_LTR, &cx, &cy);
/* Per axis: use the hinted contour point if found and that axis is scaled,
 * else fall back to the design-unit coordinate. */
295 *x = ret && x_ppem ? cx : font->em_fscale_x (xCoordinate);
296 *y = ret && y_ppem ? cy : font->em_fscale_y (yCoordinate);
299 bool sanitize (hb_sanitize_context_t *c) const
301 TRACE_SANITIZE (this);
302 return_trace (c->check_struct (this));
/* Byte-copy; no internal offsets. */
305 AnchorFormat2* copy (hb_serialize_context_t *c) const
307 TRACE_SERIALIZE (this);
308 return_trace (c->embed<AnchorFormat2> (this));
312 HBUINT16 format; /* Format identifier--format = 2 */
313 FWORD xCoordinate; /* Horizontal value--in design units */
314 FWORD yCoordinate; /* Vertical value--in design units */
315 HBUINT16 anchorPoint; /* Index to glyph contour point */
317 DEFINE_SIZE_STATIC (8);
/* AnchorFormat3: design-unit point plus optional X/Y Device (or Variation)
 * tables that add per-ppem / per-instance deltas.  (Struct opener elided
 * from this listing.) */
322 void get_anchor (hb_ot_apply_context_t *c, hb_codepoint_t glyph_id HB_UNUSED,
323 float *x, float *y) const
325 hb_font_t *font = c->font;
326 *x = font->em_fscale_x (xCoordinate);
327 *y = font->em_fscale_y (yCoordinate);
/* Deltas apply only under hinting (ppem) or variations, per axis. */
329 if (font->x_ppem || font->num_coords)
330 *x += (this+xDeviceTable).get_x_delta (font, c->var_store);
331 if (font->y_ppem || font->num_coords)
332 *y += (this+yDeviceTable).get_y_delta (font, c->var_store);
335 bool sanitize (hb_sanitize_context_t *c) const
337 TRACE_SANITIZE (this);
338 return_trace (c->check_struct (this) && xDeviceTable.sanitize (c, this) && yDeviceTable.sanitize (c, this));
/* Deep copy: embed the fixed part, then copy both Device subtables so the
 * new offsets are measured from the copied anchor. */
341 AnchorFormat3* copy (hb_serialize_context_t *c) const
343 TRACE_SERIALIZE (this);
344 auto *out = c->embed<AnchorFormat3> (this);
345 if (unlikely (!out)) return_trace (nullptr);
347 out->xDeviceTable.serialize_copy (c, xDeviceTable, this, out);
348 out->yDeviceTable.serialize_copy (c, yDeviceTable, this, out);
353 HBUINT16 format; /* Format identifier--format = 3 */
354 FWORD xCoordinate; /* Horizontal value--in design units */
355 FWORD yCoordinate; /* Vertical value--in design units */
357 xDeviceTable; /* Offset to Device table for X
358 * coordinate-- from beginning of
359 * Anchor table (may be NULL) */
361 yDeviceTable; /* Offset to Device table for Y
362 * coordinate-- from beginning of
363 * Anchor table (may be NULL) */
365 DEFINE_SIZE_STATIC (10);
/* Anchor: format-dispatching union over the three anchor formats above.
 * (The `struct Anchor` opening line is elided from this listing.) */
/* Dispatch get_anchor to the concrete format; unknown formats fall through
 * to the default set on elided lines. */
370 void get_anchor (hb_ot_apply_context_t *c, hb_codepoint_t glyph_id,
371 float *x, float *y) const
375 case 1: u.format1.get_anchor (c, glyph_id, x, y); return;
376 case 2: u.format2.get_anchor (c, glyph_id, x, y); return;
377 case 3: u.format3.get_anchor (c, glyph_id, x, y); return;
/* Sanitize the format tag first, then the matching variant; unknown
 * formats are accepted (treated as no-op anchors). */
382 bool sanitize (hb_sanitize_context_t *c) const
384 TRACE_SANITIZE (this);
385 if (!u.format.sanitize (c)) return_trace (false);
387 case 1: return_trace (u.format1.sanitize (c));
388 case 2: return_trace (u.format2.sanitize (c));
389 case 3: return_trace (u.format3.sanitize (c));
390 default:return_trace (true);
/* Copy the active variant; unknown formats yield nullptr. */
394 Anchor* copy (hb_serialize_context_t *c) const
396 TRACE_SERIALIZE (this);
398 case 1: return_trace (reinterpret_cast<Anchor *> (u.format1.copy (c)));
399 case 2: return_trace (reinterpret_cast<Anchor *> (u.format2.copy (c)));
400 case 3: return_trace (reinterpret_cast<Anchor *> (u.format3.copy (c)));
401 default:return_trace (nullptr);
407 HBUINT16 format; /* Format identifier */
408 AnchorFormat1 format1;
409 AnchorFormat2 format2;
410 AnchorFormat3 format3;
413 DEFINE_SIZE_UNION (2, format);
/* AnchorMatrix: rows x cols matrix of offsets to Anchor tables, used by the
 * mark-attachment subtables (one row per base/ligature component, one
 * column per mark class).  (Struct opener elided from this listing.) */
/* Fetch the anchor at (row, col); *found reports whether the offset was
 * non-NULL so callers can fall through to later subtables. */
419 const Anchor& get_anchor (unsigned int row, unsigned int col,
420 unsigned int cols, bool *found) const
423 if (unlikely (row >= rows || col >= cols)) return Null(Anchor);
424 *found = !matrixZ[row * cols + col].is_null ();
425 return this+matrixZ[row * cols + col];
/* `cols` comes from the caller (class count); guard rows*cols overflow
 * before range-checking the offset array and each referenced Anchor. */
428 bool sanitize (hb_sanitize_context_t *c, unsigned int cols) const
430 TRACE_SANITIZE (this);
431 if (!c->check_struct (this)) return_trace (false);
432 if (unlikely (hb_unsigned_mul_overflows (rows, cols))) return_trace (false);
433 unsigned int count = rows * cols;
434 if (!c->check_array (matrixZ.arrayZ, count)) return_trace (false);
435 for (unsigned int i = 0; i < count; i++)
436 if (!matrixZ[i].sanitize (c, this)) return_trace (false);
440 HBUINT16 rows; /* Number of rows */
442 UnsizedArrayOf<OffsetTo<Anchor>>
443 matrixZ; /* Matrix of offsets to Anchor tables--
444 * from beginning of AnchorMatrix table */
446 DEFINE_SIZE_ARRAY (2, matrixZ);
/* MarkRecord: one entry of a MarkArray -- the mark's attachment class plus
 * the offset to its Anchor.  (Struct opener elided from this listing.) */
452 friend struct MarkArray;
454 bool sanitize (hb_sanitize_context_t *c, const void *base) const
456 TRACE_SANITIZE (this);
457 return_trace (c->check_struct (this) && markAnchor.sanitize (c, base));
461 HBUINT16 klass; /* Class defined for this mark */
463 markAnchor; /* Offset to Anchor table--from
464 * beginning of MarkArray table */
466 DEFINE_SIZE_STATIC (4);
469 struct MarkArray : ArrayOf<MarkRecord> /* Array of MarkRecords--in Coverage order */
/* Attach the mark at `mark_index` (current buffer glyph) to the glyph at
 * buffer position `glyph_pos`, using the anchor matrix `anchors` indexed by
 * (glyph_index, mark class).  Sets the mark's x/y offsets to the anchor
 * difference and records the attachment in the buffer vars.  Returns false
 * (without touching positions) when no anchor exists, so later subtables
 * get a chance. */
471 bool apply (hb_ot_apply_context_t *c,
472 unsigned int mark_index, unsigned int glyph_index,
473 const AnchorMatrix &anchors, unsigned int class_count,
474 unsigned int glyph_pos) const
477 hb_buffer_t *buffer = c->buffer;
478 const MarkRecord &record = ArrayOf<MarkRecord>::operator[](mark_index);
479 unsigned int mark_class = record.klass;
481 const Anchor& mark_anchor = this + record.markAnchor;
483 const Anchor& glyph_anchor = anchors.get_anchor (glyph_index, mark_class, class_count, &found);
484 /* If this subtable doesn't have an anchor for this base and this class,
485 * return false such that the subsequent subtables have a chance at it. */
486 if (unlikely (!found)) return_trace (false);
488 float mark_x, mark_y, base_x, base_y;
490 buffer->unsafe_to_break (glyph_pos, buffer->idx);
491 mark_anchor.get_anchor (c, buffer->cur().codepoint, &mark_x, &mark_y);
492 glyph_anchor.get_anchor (c, buffer->info[glyph_pos].codepoint, &base_x, &base_y);
494 hb_glyph_position_t &o = buffer->cur_pos();
495 o.x_offset = roundf (base_x - mark_x);
496 o.y_offset = roundf (base_y - mark_y);
/* Record the attachment so final positioning can chain marks to bases. */
497 o.attach_type() = ATTACH_TYPE_MARK;
498 o.attach_chain() = (int) glyph_pos - (int) buffer->idx;
499 buffer->scratch_flags |= HB_BUFFER_SCRATCH_FLAG_HAS_GPOS_ATTACHMENT;
505 bool sanitize (hb_sanitize_context_t *c) const
507 TRACE_SANITIZE (this);
508 return_trace (ArrayOf<MarkRecord>::sanitize (c, this));
/* SinglePosFormat1: one shared ValueRecord applied to every glyph in the
 * Coverage table. */
515 struct SinglePosFormat1
517 bool intersects (const hb_set_t *glyphs) const
518 { return (this+coverage).intersects (glyphs); }
520 void collect_glyphs (hb_collect_glyphs_context_t *c) const
521 { if (unlikely (!(this+coverage).add_coverage (c->input))) return; }
523 const Coverage &get_coverage () const { return this+coverage; }
/* Apply the single value record to the current glyph if covered. */
525 bool apply (hb_ot_apply_context_t *c) const
528 hb_buffer_t *buffer = c->buffer;
529 unsigned int index = (this+coverage).get_coverage (buffer->cur().codepoint);
530 if (likely (index == NOT_COVERED)) return_trace (false);
532 valueFormat.apply_value (c, this, values, buffer->cur_pos());
/* Serialize from (glyph, values) pairs; all pairs share one value record.
 * NOTE(review): several lines of this body are elided from this listing. */
538 template<typename Iterator,
539 hb_requires (hb_is_iterator (Iterator))>
540 void serialize (hb_serialize_context_t *c,
542 ValueFormat valFormat)
544 if (unlikely (!c->extend_min (*this))) return;
545 if (unlikely (!c->check_assign (valueFormat, valFormat))) return;
547 for (const auto &_ : hb_second (*it))
552 | hb_map_retains_sorting (hb_first)
555 coverage.serialize (c, this).serialize (c, glyphs);
/* Subset: keep covered glyphs that survive the plan, remapped, with the
 * same shared value record. */
558 bool subset (hb_subset_context_t *c) const
561 const hb_set_t &glyphset = *c->plan->glyphset ();
562 const hb_map_t &glyph_map = *c->plan->glyph_map;
565 + hb_iter (this+coverage)
566 | hb_filter (glyphset)
567 | hb_map_retains_sorting (glyph_map)
568 | hb_zip (hb_repeat (values.as_array (valueFormat.get_len ())))
571 bool ret = bool (it);
572 SinglePos_serialize (c->serializer, it, valueFormat);
576 bool sanitize (hb_sanitize_context_t *c) const
578 TRACE_SANITIZE (this);
579 return_trace (c->check_struct (this) &&
580 coverage.sanitize (c, this) &&
581 valueFormat.sanitize_value (c, this, values));
585 HBUINT16 format; /* Format identifier--format = 1 */
587 coverage; /* Offset to Coverage table--from
588 * beginning of subtable */
589 ValueFormat valueFormat; /* Defines the types of data in the
591 ValueRecord values; /* Defines positioning
592 * value(s)--applied to all glyphs in
593 * the Coverage table */
595 DEFINE_SIZE_ARRAY (6, values);
/* SinglePosFormat2: one ValueRecord per covered glyph (indexed by Coverage
 * index), all sharing a single ValueFormat. */
598 struct SinglePosFormat2
600 bool intersects (const hb_set_t *glyphs) const
601 { return (this+coverage).intersects (glyphs); }
603 void collect_glyphs (hb_collect_glyphs_context_t *c) const
604 { if (unlikely (!(this+coverage).add_coverage (c->input))) return; }
606 const Coverage &get_coverage () const { return this+coverage; }
/* Apply the value record at the glyph's coverage index, bounds-checked
 * against valueCount. */
608 bool apply (hb_ot_apply_context_t *c) const
611 hb_buffer_t *buffer = c->buffer;
612 unsigned int index = (this+coverage).get_coverage (buffer->cur().codepoint);
613 if (likely (index == NOT_COVERED)) return_trace (false);
615 if (likely (index >= valueCount)) return_trace (false);
617 valueFormat.apply_value (c, this,
618 &values[index * valueFormat.get_len ()],
/* Serialize from (glyph, values) pairs; one record per pair.
 * NOTE(review): several lines of this body are elided from this listing. */
625 template<typename Iterator,
626 hb_requires (hb_is_iterator (Iterator))>
627 void serialize (hb_serialize_context_t *c,
629 ValueFormat valFormat)
631 if (unlikely (!c->extend_min (*this))) return;
632 if (unlikely (!c->check_assign (valueFormat, valFormat))) return;
633 if (unlikely (!c->check_assign (valueCount, it.len ()))) return;
635 for (const auto iter : it)
636 for (const auto &_ : iter.second)
641 | hb_map_retains_sorting (hb_first)
644 coverage.serialize (c, this).serialize (c, glyphs);
/* Subset: pair each surviving glyph with its sub-slice of the value array,
 * remap glyph ids, and re-serialize. */
647 bool subset (hb_subset_context_t *c) const
650 const hb_set_t &glyphset = *c->plan->glyphset ();
651 const hb_map_t &glyph_map = *c->plan->glyph_map;
653 unsigned sub_length = valueFormat.get_len ();
654 auto values_array = values.as_array (valueCount * sub_length);
657 + hb_zip (this+coverage, hb_range ((unsigned) valueCount))
658 | hb_filter (glyphset, hb_first)
659 | hb_map_retains_sorting ([&] (const hb_pair_t<hb_codepoint_t, unsigned>& _)
661 return hb_pair (glyph_map[_.first],
662 values_array.sub_array (_.second * sub_length,
667 bool ret = bool (it);
668 SinglePos_serialize (c->serializer, it, valueFormat);
672 bool sanitize (hb_sanitize_context_t *c) const
674 TRACE_SANITIZE (this);
675 return_trace (c->check_struct (this) &&
676 coverage.sanitize (c, this) &&
677 valueFormat.sanitize_values (c, this, values, valueCount));
681 HBUINT16 format; /* Format identifier--format = 2 */
683 coverage; /* Offset to Coverage table--from
684 * beginning of subtable */
685 ValueFormat valueFormat; /* Defines the types of data in the
687 HBUINT16 valueCount; /* Number of ValueRecords */
688 ValueRecord values; /* Array of ValueRecords--positioning
689 * values applied to glyphs */
691 DEFINE_SIZE_ARRAY (8, values);
/* SinglePos: format-dispatching union over formats 1 and 2, plus the
 * serializer that picks the cheaper format.  (The `struct SinglePos`
 * opening line is elided from this listing.) */
/* Choose format 1 when every glyph's value array is identical, else 2.
 * NOTE(review): the return statements fall on elided lines. */
696 template<typename Iterator,
697 hb_requires (hb_is_iterator (Iterator))>
698 unsigned get_format (Iterator glyph_val_iter_pairs)
700 hb_array_t<const Value> first_val_iter = hb_second (*glyph_val_iter_pairs);
702 for (const auto iter : glyph_val_iter_pairs)
703 for (const auto _ : hb_zip (iter.second, first_val_iter))
704 if (_.first != _.second)
711 template<typename Iterator,
712 hb_requires (hb_is_iterator (Iterator))>
713 void serialize (hb_serialize_context_t *c,
714 Iterator glyph_val_iter_pairs,
715 ValueFormat valFormat)
717 if (unlikely (!c->extend_min (u.format))) return;
720 if (glyph_val_iter_pairs) format = get_format (glyph_val_iter_pairs);
724 case 1: u.format1.serialize (c, glyph_val_iter_pairs, valFormat);
726 case 2: u.format2.serialize (c, glyph_val_iter_pairs, valFormat);
/* Generic dispatch used by apply/sanitize/subset contexts. */
732 template <typename context_t, typename ...Ts>
733 typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
735 TRACE_DISPATCH (this, u.format);
736 if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
738 case 1: return_trace (c->dispatch (u.format1, hb_forward<Ts> (ds)...));
739 case 2: return_trace (c->dispatch (u.format2, hb_forward<Ts> (ds)...));
740 default:return_trace (c->default_return_value ());
746 HBUINT16 format; /* Format identifier */
747 SinglePosFormat1 format1;
748 SinglePosFormat2 format2;
/* Out-of-line definition of the forward-declared helper: delegates to
 * SinglePos::serialize on a freshly embedded subtable. */
752 template<typename Iterator>
754 SinglePos_serialize (hb_serialize_context_t *c,
756 ValueFormat valFormat)
757 { c->start_embed<SinglePos> ()->serialize (c, it, valFormat); }
/* PairValueRecord: second glyph of a pair plus the concatenated value
 * records for both glyphs (lengths come from the owning subtable's
 * ValueFormats, so the record itself is unsized). */
760 struct PairValueRecord
762 friend struct PairSet;
/* Copy this record into the serializer with the second glyph remapped;
 * `length` is the total Value count (len1 + len2) supplied by the caller. */
764 bool serialize (hb_serialize_context_t *c,
766 const hb_map_t &glyph_map) const
768 TRACE_SERIALIZE (this);
769 auto *out = c->start_embed (*this);
770 if (unlikely (!c->extend_min (out))) return_trace (false);
772 out->secondGlyph = glyph_map[secondGlyph];
773 return_trace (c->copy (values, length));
777 HBGlyphID secondGlyph; /* GlyphID of second glyph in the
778 * pair--first glyph is listed in the
780 ValueRecord values; /* Positioning data for the first glyph
781 * followed by for second glyph */
783 DEFINE_SIZE_ARRAY (2, values);
/* PairSet: the PairValueRecords for one first-glyph, ordered by second
 * glyph id; records are fixed-stride (1 + len1 + len2 HBUINT16s) so they
 * are walked with StructAtOffset.  (The `struct PairSet` opening line is
 * elided from this listing.) */
788 friend struct PairPosFormat1;
790 bool intersects (const hb_set_t *glyphs,
791 const ValueFormat *valueFormats) const
793 unsigned int len1 = valueFormats[0].get_len ();
794 unsigned int len2 = valueFormats[1].get_len ();
795 unsigned int record_size = HBUINT16::static_size * (1 + len1 + len2);
797 const PairValueRecord *record = &firstPairValueRecord;
798 unsigned int count = len;
799 for (unsigned int i = 0; i < count; i++)
801 if (glyphs->has (record->secondGlyph))
803 record = &StructAtOffset<const PairValueRecord> (record, record_size);
/* Add every second glyph to the closure input set (strided array walk). */
808 void collect_glyphs (hb_collect_glyphs_context_t *c,
809 const ValueFormat *valueFormats) const
811 unsigned int len1 = valueFormats[0].get_len ();
812 unsigned int len2 = valueFormats[1].get_len ();
813 unsigned int record_size = HBUINT16::static_size * (1 + len1 + len2);
815 const PairValueRecord *record = &firstPairValueRecord;
816 c->input->add_array (&record->secondGlyph, len, record_size);
/* Look up the glyph at buffer position `pos` among the second glyphs
 * (binary search over the strided records); on a hit apply value1 to the
 * current glyph and value2 to the paired glyph. */
819 bool apply (hb_ot_apply_context_t *c,
820 const ValueFormat *valueFormats,
821 unsigned int pos) const
824 hb_buffer_t *buffer = c->buffer;
825 unsigned int len1 = valueFormats[0].get_len ();
826 unsigned int len2 = valueFormats[1].get_len ();
827 unsigned int record_size = HBUINT16::static_size * (1 + len1 + len2);
829 unsigned int count = len;
831 /* Hand-coded bsearch. */
832 if (unlikely (!count))
833 return_trace (false);
834 hb_codepoint_t x = buffer->info[pos].codepoint;
835 int min = 0, max = (int) count - 1;
838 int mid = ((unsigned int) min + (unsigned int) max) / 2;
839 const PairValueRecord *record = &StructAtOffset<PairValueRecord> (&firstPairValueRecord, record_size * mid);
840 hb_codepoint_t mid_x = record->secondGlyph;
847 /* Note the intentional use of "|" instead of short-circuit "||". */
848 if (valueFormats[0].apply_value (c, this, &record->values[0], buffer->cur_pos()) |
849 valueFormats[1].apply_value (c, this, &record->values[len1], buffer->pos[pos]))
850 buffer->unsafe_to_break (buffer->idx, pos + 1);
858 return_trace (false);
/* Subset: re-serialize only the records whose second glyph survives; if
 * none do, roll the serializer back to the pre-subset snapshot. */
861 bool subset (hb_subset_context_t *c,
862 const ValueFormat valueFormats[2]) const
865 auto snap = c->serializer->snapshot ();
867 auto *out = c->serializer->start_embed (*this);
868 if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
871 const hb_set_t &glyphset = *c->plan->glyphset ();
872 const hb_map_t &glyph_map = *c->plan->glyph_map;
874 unsigned len1 = valueFormats[0].get_len ();
875 unsigned len2 = valueFormats[1].get_len ();
876 unsigned record_size = HBUINT16::static_size + Value::static_size * (len1 + len2);
878 const PairValueRecord *record = &firstPairValueRecord;
879 unsigned count = len, num = 0;
880 for (unsigned i = 0; i < count; i++)
882 if (!glyphset.has (record->secondGlyph)) continue;
883 if (record->serialize (c->serializer, len1 + len2, glyph_map)) num++;
884 record = &StructAtOffset<const PairValueRecord> (record, record_size);
888 if (!num) c->serializer->revert (snap);
/* Closure passed down from PairPosFormat1::sanitize so the strided value
 * arrays can be checked without re-deriving the formats. */
892 struct sanitize_closure_t
895 const ValueFormat *valueFormats;
896 unsigned int len1; /* valueFormats[0].get_len() */
897 unsigned int stride; /* 1 + len1 + len2 */
900 bool sanitize (hb_sanitize_context_t *c, const sanitize_closure_t *closure) const
902 TRACE_SANITIZE (this);
903 if (!(c->check_struct (this)
904 && c->check_range (&firstPairValueRecord,
906 HBUINT16::static_size,
907 closure->stride))) return_trace (false);
909 unsigned int count = len;
910 const PairValueRecord *record = &firstPairValueRecord;
911 return_trace (closure->valueFormats[0].sanitize_values_stride_unsafe (c, closure->base, &record->values[0], count, closure->stride) &&
912 closure->valueFormats[1].sanitize_values_stride_unsafe (c, closure->base, &record->values[closure->len1], count, closure->stride));
916 HBUINT16 len; /* Number of PairValueRecords */
917 PairValueRecord firstPairValueRecord;
918 /* Array of PairValueRecords--ordered
919 * by GlyphID of the second glyph */
/* PairPosFormat1: pair positioning by explicit glyph pairs -- one PairSet
 * per covered first glyph, each listing its second glyphs. */
924 struct PairPosFormat1
926 bool intersects (const hb_set_t *glyphs) const
929 + hb_zip (this+coverage, pairSet)
930 | hb_filter (*glyphs, hb_first)
932 | hb_map ([glyphs, this] (const OffsetTo<PairSet> &_)
933 { return (this+_).intersects (glyphs, valueFormat); })
938 void collect_glyphs (hb_collect_glyphs_context_t *c) const
940 if (unlikely (!(this+coverage).add_coverage (c->input))) return;
941 unsigned int count = pairSet.len;
942 for (unsigned int i = 0; i < count; i++)
943 (this+pairSet[i]).collect_glyphs (c, valueFormat);
946 const Coverage &get_coverage () const { return this+coverage; }
/* Apply: if the current glyph is covered, find the next non-skipped glyph
 * and let the matching PairSet position the pair. */
948 bool apply (hb_ot_apply_context_t *c) const
951 hb_buffer_t *buffer = c->buffer;
952 unsigned int index = (this+coverage).get_coverage (buffer->cur().codepoint);
953 if (likely (index == NOT_COVERED)) return_trace (false);
955 hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
956 skippy_iter.reset (buffer->idx, 1);
957 if (!skippy_iter.next ()) return_trace (false);
959 return_trace ((this+pairSet[index]).apply (c, valueFormat, skippy_iter.idx));
/* Subset: keep first glyphs that survive AND whose subsetted PairSet is
 * non-empty (empty sets are reverted), then rebuild coverage. */
962 bool subset (hb_subset_context_t *c) const
966 const hb_set_t &glyphset = *c->plan->glyphset ();
967 const hb_map_t &glyph_map = *c->plan->glyph_map;
969 auto *out = c->serializer->start_embed (*this);
970 if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
971 out->format = format;
972 out->valueFormat[0] = valueFormat[0];
973 out->valueFormat[1] = valueFormat[1];
975 hb_sorted_vector_t<hb_codepoint_t> new_coverage;
977 + hb_zip (this+coverage, pairSet)
978 | hb_filter (glyphset, hb_first)
979 | hb_filter ([this, c, out] (const OffsetTo<PairSet>& _)
981 auto *o = out->pairSet.serialize_append (c->serializer);
982 if (unlikely (!o)) return false;
983 auto snap = c->serializer->snapshot ();
984 bool ret = o->serialize_subset (c, _, this, out, valueFormat);
988 c->serializer->revert (snap);
995 | hb_sink (new_coverage)
998 out->coverage.serialize (c->serializer, out)
999 .serialize (c->serializer, new_coverage.iter ());
1001 return_trace (bool (new_coverage));
/* Sanitize: build the PairSet closure (formats + stride) so each PairSet
 * can validate its strided records.  NOTE(review): the closure's
 * initializer list falls on elided lines. */
1004 bool sanitize (hb_sanitize_context_t *c) const
1006 TRACE_SANITIZE (this);
1008 if (!c->check_struct (this)) return_trace (false);
1010 unsigned int len1 = valueFormat[0].get_len ();
1011 unsigned int len2 = valueFormat[1].get_len ();
1012 PairSet::sanitize_closure_t closure =
1020 return_trace (coverage.sanitize (c, this) && pairSet.sanitize (c, this, &closure));
1024 HBUINT16 format; /* Format identifier--format = 1 */
1026 coverage; /* Offset to Coverage table--from
1027 * beginning of subtable */
1028 ValueFormat valueFormat[2]; /* [0] Defines the types of data in
1029 * ValueRecord1--for the first glyph
1030 * in the pair--may be zero (0) */
1031 /* [1] Defines the types of data in
1032 * ValueRecord2--for the second glyph
1033 * in the pair--may be zero (0) */
1034 OffsetArrayOf<PairSet>
1035 pairSet; /* Array of PairSet tables
1036 * ordered by Coverage Index */
1038 DEFINE_SIZE_ARRAY (10, pairSet);
1041 struct PairPosFormat2
1043 bool intersects (const hb_set_t *glyphs) const
1045 return (this+coverage).intersects (glyphs) &&
1046 (this+classDef2).intersects (glyphs);
1049 void collect_glyphs (hb_collect_glyphs_context_t *c) const
1051 if (unlikely (!(this+coverage).add_coverage (c->input))) return;
1052 if (unlikely (!(this+classDef2).add_coverage (c->input))) return;
1055 const Coverage &get_coverage () const { return this+coverage; }
1057 bool apply (hb_ot_apply_context_t *c) const
1060 hb_buffer_t *buffer = c->buffer;
1061 unsigned int index = (this+coverage).get_coverage (buffer->cur().codepoint);
1062 if (likely (index == NOT_COVERED)) return_trace (false);
1064 hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
1065 skippy_iter.reset (buffer->idx, 1);
1066 if (!skippy_iter.next ()) return_trace (false);
1068 unsigned int len1 = valueFormat1.get_len ();
1069 unsigned int len2 = valueFormat2.get_len ();
1070 unsigned int record_len = len1 + len2;
1072 unsigned int klass1 = (this+classDef1).get_class (buffer->cur().codepoint);
1073 unsigned int klass2 = (this+classDef2).get_class (buffer->info[skippy_iter.idx].codepoint);
1074 if (unlikely (klass1 >= class1Count || klass2 >= class2Count)) return_trace (false);
1076 const Value *v = &values[record_len * (klass1 * class2Count + klass2)];
1077 /* Note the intentional use of "|" instead of short-circuit "||". */
1078 if (valueFormat1.apply_value (c, this, v, buffer->cur_pos()) |
1079 valueFormat2.apply_value (c, this, v + len1, buffer->pos[skippy_iter.idx]))
1080 buffer->unsafe_to_break (buffer->idx, skippy_iter.idx + 1);
1082 buffer->idx = skippy_iter.idx;
1086 return_trace (true);
1089 bool subset (hb_subset_context_t *c) const
1091 TRACE_SUBSET (this);
1092 auto *out = c->serializer->start_embed (*this);
1093 if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
1094 out->format = format;
1095 out->valueFormat1 = valueFormat1;
1096 out->valueFormat2 = valueFormat2;
1098 hb_map_t klass1_map;
1099 out->classDef1.serialize_subset (c, classDef1, this, out, &klass1_map);
1100 out->class1Count = klass1_map.get_population ();
1102 hb_map_t klass2_map;
1103 out->classDef2.serialize_subset (c, classDef2, this, out, &klass2_map);
1104 out->class2Count = klass2_map.get_population ();
1106 unsigned record_len = valueFormat1.get_len () + valueFormat2.get_len ();
1108 + hb_range ((unsigned) class1Count)
1109 | hb_filter (klass1_map)
1110 | hb_apply ([&] (const unsigned class1_idx)
1112 + hb_range ((unsigned) class2Count)
1113 | hb_filter (klass2_map)
1114 | hb_apply ([&] (const unsigned class2_idx)
1116 unsigned idx = (class1_idx * (unsigned) class2Count + class2_idx) * record_len;
1117 for (unsigned i = 0; i < record_len; i++)
1118 c->serializer->copy (values[idx+i]);
1124 const hb_set_t &glyphset = *c->plan->glyphset ();
1125 const hb_map_t &glyph_map = *c->plan->glyph_map;
1128 + hb_iter (this+coverage)
1129 | hb_filter (glyphset)
1130 | hb_map_retains_sorting (glyph_map)
1133 out->coverage.serialize (c->serializer, out).serialize (c->serializer, it);
1134 return_trace (out->class1Count && out->class2Count && bool (it));
1137 bool sanitize (hb_sanitize_context_t *c) const
/* Validates the fixed header and each referenced subtable, then the
 * trailing class1Count x class2Count matrix of interleaved value records. */
1139 TRACE_SANITIZE (this);
1140 if (!(c->check_struct (this)
1141 && coverage.sanitize (c, this)
1142 && classDef1.sanitize (c, this)
1143 && classDef2.sanitize (c, this))) return_trace (false);
/* len1/len2 are the per-record unit counts; stride is the interleaved
 * record length used when sanitizing each format's slice of the matrix. */
1145 unsigned int len1 = valueFormat1.get_len ();
1146 unsigned int len2 = valueFormat2.get_len ();
1147 unsigned int stride = len1 + len2;
1148 unsigned int record_size = valueFormat1.get_size () + valueFormat2.get_size ();
1149 unsigned int count = (unsigned int) class1Count * (unsigned int) class2Count;
/* First a raw range check over the whole matrix, then per-format checks
 * (e.g. device-table offsets) done with an explicit stride, since value1
 * and value2 records alternate within each matrix cell. */
1150 return_trace (c->check_range ((const void *) values,
1153 valueFormat1.sanitize_values_stride_unsafe (c, this, &values[0], count, stride) &&
1154 valueFormat2.sanitize_values_stride_unsafe (c, this, &values[len1], count, stride));
/* Wire layout of a PairPosFormat2 (class-pair kerning) subtable.
 * All offsets are relative to the start of this subtable. */
1158 HBUINT16 format; /* Format identifier--format = 2 */
1160 coverage; /* Offset to Coverage table--from
1161 * beginning of subtable */
1162 ValueFormat valueFormat1; /* ValueRecord definition--for the
1163 * first glyph of the pair--may be zero
1165 ValueFormat valueFormat2; /* ValueRecord definition--for the
1166 * second glyph of the pair--may be
1169 classDef1; /* Offset to ClassDef table--from
1170 * beginning of PairPos subtable--for
1171 * the first glyph of the pair */
1173 classDef2; /* Offset to ClassDef table--from
1174 * beginning of PairPos subtable--for
1175 * the second glyph of the pair */
1176 HBUINT16 class1Count; /* Number of classes in ClassDef1
1177 * table--includes Class0 */
1178 HBUINT16 class2Count; /* Number of classes in ClassDef2
1179 * table--includes Class0 */
1180 ValueRecord values; /* Matrix of value pairs:
1181 * class1-major, class2-minor,
1182 * Each entry has value1 and value2 */
1184 DEFINE_SIZE_ARRAY (16, values);
/* PairPos: format-dispatching wrapper over the two PairPos subtable formats. */
1189 template <typename context_t, typename ...Ts>
1190 typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
1192 TRACE_DISPATCH (this, u.format);
/* may_dispatch() also sanitizes the format field before it is read. */
1193 if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
1195 case 1: return_trace (c->dispatch (u.format1, hb_forward<Ts> (ds)...));
1196 case 2: return_trace (c->dispatch (u.format2, hb_forward<Ts> (ds)...));
1197 default:return_trace (c->default_return_value ());
/* Union of the concrete formats; `format` aliases each member's first field. */
1203 HBUINT16 format; /* Format identifier */
1204 PairPosFormat1 format1;
1205 PairPosFormat2 format2;
/* One coverage-indexed record of a CursivePos subtable: the entry and
 * exit anchors of a glyph, either of which may be absent (NULL offset). */
1210 struct EntryExitRecord
1212 friend struct CursivePosFormat1;
1214 bool sanitize (hb_sanitize_context_t *c, const void *base) const
1216 TRACE_SANITIZE (this);
/* Anchor offsets are relative to `base` (the enclosing subtable). */
1217 return_trace (entryAnchor.sanitize (c, base) && exitAnchor.sanitize (c, base));
/* Deep-copies this record for subsetting: embeds the record itself, then
 * copies each referenced Anchor table, rebasing offsets from src_base
 * to dst_base.  Returns nullptr on serializer overflow. */
1220 EntryExitRecord* copy (hb_serialize_context_t *c,
1221 const void *src_base,
1222 const void *dst_base) const
1224 TRACE_SERIALIZE (this);
1225 auto *out = c->embed (this);
1226 if (unlikely (!out)) return_trace (nullptr);
1228 out->entryAnchor.serialize_copy (c, entryAnchor, src_base, dst_base);
1229 out->exitAnchor.serialize_copy (c, exitAnchor, src_base, dst_base);
1235 entryAnchor; /* Offset to EntryAnchor table--from
1236 * beginning of CursivePos
1237 * subtable--may be NULL */
1239 exitAnchor; /* Offset to ExitAnchor table--from
1240 * beginning of CursivePos
1241 * subtable--may be NULL */
1243 DEFINE_SIZE_STATIC (4);
/* Forward declaration; defined after GPOS below.  Reverses an existing
 * cursive attachment chain so it can re-root onto a new parent. */
1247 reverse_cursive_minor_offset (hb_glyph_position_t *pos, unsigned int i, hb_direction_t direction, unsigned int new_parent);
/* CursivePos subtable format 1: connects the exit anchor of one glyph
 * to the entry anchor of the next, for cursive-joining scripts. */
1249 struct CursivePosFormat1
1251 bool intersects (const hb_set_t *glyphs) const
1252 { return (this+coverage).intersects (glyphs); }
1254 void collect_glyphs (hb_collect_glyphs_context_t *c) const
1255 { if (unlikely (!(this+coverage).add_coverage (c->input))) return; }
1257 const Coverage &get_coverage () const { return this+coverage; }
/* Positions the current glyph against the previous (unskipped) glyph:
 * aligns prev's exit anchor with cur's entry anchor, adjusting advances
 * along the main direction and offsets across it. */
1259 bool apply (hb_ot_apply_context_t *c) const
1262 hb_buffer_t *buffer = c->buffer;
/* NOT_COVERED indexes past the array and yields the Null record,
 * whose entryAnchor is zero, so the next check rejects it. */
1264 const EntryExitRecord &this_record = entryExitRecord[(this+coverage).get_coverage (buffer->cur().codepoint)];
1265 if (!this_record.entryAnchor) return_trace (false);
/* Find the previous glyph this one can attach to, honoring lookup flags. */
1267 hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
1268 skippy_iter.reset (buffer->idx, 1);
1269 if (!skippy_iter.prev ()) return_trace (false);
1271 const EntryExitRecord &prev_record = entryExitRecord[(this+coverage).get_coverage (buffer->info[skippy_iter.idx].codepoint)];
1272 if (!prev_record.exitAnchor) return_trace (false);
/* i = previous glyph, j = current glyph; the span joins as one cluster. */
1274 unsigned int i = skippy_iter.idx;
1275 unsigned int j = buffer->idx;
1277 buffer->unsafe_to_break (i, j);
1278 float entry_x, entry_y, exit_x, exit_y;
1279 (this+prev_record.exitAnchor).get_anchor (c, buffer->info[i].codepoint, &exit_x, &exit_y);
1280 (this+this_record.entryAnchor).get_anchor (c, buffer->info[j].codepoint, &entry_x, &entry_y);
1282 hb_glyph_position_t *pos = buffer->pos;
1285 /* Main-direction adjustment */
1286 switch (c->direction) {
1287 case HB_DIRECTION_LTR:
/* Truncate i's advance at its exit anchor; trim j's start to its entry anchor. */
1288 pos[i].x_advance = roundf (exit_x) + pos[i].x_offset;
1290 d = roundf (entry_x) + pos[j].x_offset;
1291 pos[j].x_advance -= d;
1292 pos[j].x_offset -= d;
1294 case HB_DIRECTION_RTL:
/* Mirror of LTR: trim i from its exit anchor, truncate j at its entry anchor. */
1295 d = roundf (exit_x) + pos[i].x_offset;
1296 pos[i].x_advance -= d;
1297 pos[i].x_offset -= d;
1299 pos[j].x_advance = roundf (entry_x) + pos[j].x_offset;
1301 case HB_DIRECTION_TTB:
1302 pos[i].y_advance = roundf (exit_y) + pos[i].y_offset;
1304 d = roundf (entry_y) + pos[j].y_offset;
1305 pos[j].y_advance -= d;
1306 pos[j].y_offset -= d;
1308 case HB_DIRECTION_BTT:
1309 d = roundf (exit_y) + pos[i].y_offset;
1310 pos[i].y_advance -= d;
1311 pos[i].y_offset -= d;
/* NOTE(review): unlike the parallel RTL (x) and TTB branches, entry_y here
 * is not biased by pos[j].y_offset — confirm against upstream HarfBuzz,
 * which adds `+ pos[j].y_offset` in this branch. */
1313 pos[j].y_advance = roundf (entry_y);
1315 case HB_DIRECTION_INVALID:
1320 /* Cross-direction adjustment */
1322 /* We attach child to parent (think graph theory and rooted trees whereas
1323 * the root stays on baseline and each node aligns itself against its
1326 * Optimize things for the case of RightToLeft, as that's most common in
1328 unsigned int child = i;
1329 unsigned int parent = j;
/* Offset that moves the child so its anchor coincides with the parent's. */
1330 hb_position_t x_offset = entry_x - exit_x;
1331 hb_position_t y_offset = entry_y - exit_y;
/* Without the RightToLeft lookup flag, attachment direction is flipped:
 * swap child/parent roles and negate the offset accordingly. */
1332 if (!(c->lookup_props & LookupFlag::RightToLeft))
1334 unsigned int k = child;
1337 x_offset = -x_offset;
1338 y_offset = -y_offset;
1341 /* If child was already connected to someone else, walk through its old
1342 * chain and reverse the link direction, such that the whole tree of its
1343 * previous connection now attaches to new parent. Watch out for case
1344 * where new parent is on the path from old chain...
1346 reverse_cursive_minor_offset (pos, child, c->direction, parent);
/* Record the attachment; attach_chain() is a signed delta to the parent. */
1348 pos[child].attach_type() = ATTACH_TYPE_CURSIVE;
1349 pos[child].attach_chain() = (int) parent - (int) child;
1350 buffer->scratch_flags |= HB_BUFFER_SCRATCH_FLAG_HAS_GPOS_ATTACHMENT;
/* Only the cross-axis offset is stored now; the main axis was already
 * handled via advances above.  Propagation happens at position_finish. */
1351 if (likely (HB_DIRECTION_IS_HORIZONTAL (c->direction)))
1352 pos[child].y_offset = y_offset;
1354 pos[child].x_offset = x_offset;
1357 return_trace (true);
/* Serializes a new CursivePosFormat1 from an iterator of
 * (glyph, EntryExitRecord) pairs, deep-copying each record's anchors. */
1360 template <typename Iterator,
1361 hb_requires (hb_is_iterator (Iterator))>
1362 void serialize (hb_serialize_context_t *c,
1364 const void *src_base)
1366 if (unlikely (!c->extend_min ((*this)))) return;
1368 this->entryExitRecord.len = it.len ();
1370 for (const EntryExitRecord& entry_record : + it
1371 | hb_map (hb_second))
1372 c->copy (entry_record, src_base, this);
1376 | hb_map_retains_sorting (hb_first)
1379 coverage.serialize (c, this).serialize (c, glyphs);
/* Subsets by filtering covered glyphs through the plan's glyph set and
 * renumbering them, then re-serializing the surviving records. */
1382 bool subset (hb_subset_context_t *c) const
1384 TRACE_SUBSET (this);
1385 const hb_set_t &glyphset = *c->plan->glyphset ();
1386 const hb_map_t &glyph_map = *c->plan->glyph_map;
1388 auto *out = c->serializer->start_embed (*this);
1389 if (unlikely (!out)) return_trace (false);
1392 + hb_zip (this+coverage, entryExitRecord)
1393 | hb_filter (glyphset, hb_first)
1394 | hb_map_retains_sorting ([&] (hb_pair_t<hb_codepoint_t, const EntryExitRecord&> p) -> hb_pair_t<hb_codepoint_t, const EntryExitRecord&>
1395 { return hb_pair (glyph_map[p.first], p.second);})
/* Capture non-emptiness before the iterator is consumed by serialize(). */
1398 bool ret = bool (it);
1399 out->serialize (c->serializer, it, this);
1403 bool sanitize (hb_sanitize_context_t *c) const
1405 TRACE_SANITIZE (this);
1406 return_trace (coverage.sanitize (c, this) && entryExitRecord.sanitize (c, this));
1410 HBUINT16 format; /* Format identifier--format = 1 */
1412 coverage; /* Offset to Coverage table--from
1413 * beginning of subtable */
1414 ArrayOf<EntryExitRecord>
1415 entryExitRecord; /* Array of EntryExit records--in
1416 * Coverage Index order */
1418 DEFINE_SIZE_ARRAY (6, entryExitRecord);
/* CursivePos: format-dispatching wrapper (only format 1 is defined). */
1423 template <typename context_t, typename ...Ts>
1424 typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
1426 TRACE_DISPATCH (this, u.format);
/* may_dispatch() sanitizes the format field before it is read. */
1427 if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
1429 case 1: return_trace (c->dispatch (u.format1, hb_forward<Ts> (ds)...));
1430 default:return_trace (c->default_return_value ());
1436 HBUINT16 format; /* Format identifier */
1437 CursivePosFormat1 format1;
1442 typedef AnchorMatrix BaseArray; /* base-major--
1443 * in order of BaseCoverage Index--,
1445 * ordered by class--zero-based. */
/* MarkBasePos subtable format 1: attaches a mark glyph to a preceding
 * base glyph via matching mark-class / base anchor pairs. */
1447 struct MarkBasePosFormat1
1449 bool intersects (const hb_set_t *glyphs) const
1450 { return (this+markCoverage).intersects (glyphs) &&
1451 (this+baseCoverage).intersects (glyphs); }
1453 void collect_glyphs (hb_collect_glyphs_context_t *c) const
1455 if (unlikely (!(this+markCoverage).add_coverage (c->input))) return;
1456 if (unlikely (!(this+baseCoverage).add_coverage (c->input))) return;
1459 const Coverage &get_coverage () const { return this+markCoverage; }
1461 bool apply (hb_ot_apply_context_t *c) const
1464 hb_buffer_t *buffer = c->buffer;
1465 unsigned int mark_index = (this+markCoverage).get_coverage (buffer->cur().codepoint);
/* likely(): most glyphs are not marks covered by this subtable. */
1466 if (likely (mark_index == NOT_COVERED)) return_trace (false);
1468 /* Now we search backwards for a non-mark glyph */
1469 hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
1470 skippy_iter.reset (buffer->idx, 1);
1471 skippy_iter.set_lookup_props (LookupFlag::IgnoreMarks);
1473 if (!skippy_iter.prev ()) return_trace (false);
1474 /* We only want to attach to the first of a MultipleSubst sequence.
1475 * https://github.com/harfbuzz/harfbuzz/issues/740
1477 * ...but stop if we find a mark in the MultipleSubst sequence:
1478 * https://github.com/harfbuzz/harfbuzz/issues/1020 */
1479 if (!_hb_glyph_info_multiplied (&buffer->info[skippy_iter.idx]) ||
1480 0 == _hb_glyph_info_get_lig_comp (&buffer->info[skippy_iter.idx]) ||
1481 (skippy_iter.idx == 0 ||
1482 _hb_glyph_info_is_mark (&buffer->info[skippy_iter.idx - 1]) ||
1483 _hb_glyph_info_get_lig_id (&buffer->info[skippy_iter.idx]) !=
1484 _hb_glyph_info_get_lig_id (&buffer->info[skippy_iter.idx - 1]) ||
1485 _hb_glyph_info_get_lig_comp (&buffer->info[skippy_iter.idx]) !=
1486 _hb_glyph_info_get_lig_comp (&buffer->info[skippy_iter.idx - 1]) + 1
1489 skippy_iter.reject ();
1492 /* Checking that matched glyph is actually a base glyph by GDEF is too strong; disabled */
1493 //if (!_hb_glyph_info_is_base_glyph (&buffer->info[skippy_iter.idx])) { return_trace (false); }
1495 unsigned int base_index = (this+baseCoverage).get_coverage (buffer->info[skippy_iter.idx].codepoint);
1496 if (base_index == NOT_COVERED) return_trace (false);
/* MarkArray::apply does the actual anchor alignment and attach bookkeeping. */
1498 return_trace ((this+markArray).apply (c, mark_index, base_index, this+baseArray, classCount, skippy_iter.idx));
/* Subsetting of MarkBasePos is not implemented; dropping the subtable. */
1501 bool subset (hb_subset_context_t *c) const
1503 TRACE_SUBSET (this);
1505 return_trace (false);
1508 bool sanitize (hb_sanitize_context_t *c) const
1510 TRACE_SANITIZE (this);
1511 return_trace (c->check_struct (this) &&
1512 markCoverage.sanitize (c, this) &&
1513 baseCoverage.sanitize (c, this) &&
1514 markArray.sanitize (c, this) &&
/* BaseArray's column count comes from classCount, hence the extra arg. */
1515 baseArray.sanitize (c, this, (unsigned int) classCount));
1519 HBUINT16 format; /* Format identifier--format = 1 */
1521 markCoverage; /* Offset to MarkCoverage table--from
1522 * beginning of MarkBasePos subtable */
1524 baseCoverage; /* Offset to BaseCoverage table--from
1525 * beginning of MarkBasePos subtable */
1526 HBUINT16 classCount; /* Number of classes defined for marks */
1528 markArray; /* Offset to MarkArray table--from
1529 * beginning of MarkBasePos subtable */
1531 baseArray; /* Offset to BaseArray table--from
1532 * beginning of MarkBasePos subtable */
1534 DEFINE_SIZE_STATIC (12);
/* MarkBasePos: format-dispatching wrapper (only format 1 is defined). */
1539 template <typename context_t, typename ...Ts>
1540 typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
1542 TRACE_DISPATCH (this, u.format);
1543 if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
1545 case 1: return_trace (c->dispatch (u.format1, hb_forward<Ts> (ds)...));
1546 default:return_trace (c->default_return_value ());
1552 HBUINT16 format; /* Format identifier */
1553 MarkBasePosFormat1 format1;
1558 typedef AnchorMatrix LigatureAttach; /* component-major--
1559 * in order of writing direction--,
1561 * ordered by class--zero-based. */
1563 typedef OffsetListOf<LigatureAttach> LigatureArray;
1564 /* Array of LigatureAttach
1566 * LigatureCoverage Index */
/* MarkLigPos subtable format 1: attaches a mark glyph to a specific
 * component of a preceding ligature glyph. */
1568 struct MarkLigPosFormat1
1570 bool intersects (const hb_set_t *glyphs) const
1571 { return (this+markCoverage).intersects (glyphs) &&
1572 (this+ligatureCoverage).intersects (glyphs); }
1574 void collect_glyphs (hb_collect_glyphs_context_t *c) const
1576 if (unlikely (!(this+markCoverage).add_coverage (c->input))) return;
1577 if (unlikely (!(this+ligatureCoverage).add_coverage (c->input))) return;
1580 const Coverage &get_coverage () const { return this+markCoverage; }
1582 bool apply (hb_ot_apply_context_t *c) const
1585 hb_buffer_t *buffer = c->buffer;
1586 unsigned int mark_index = (this+markCoverage).get_coverage (buffer->cur().codepoint);
/* likely(): most glyphs are not marks covered by this subtable. */
1587 if (likely (mark_index == NOT_COVERED)) return_trace (false);
1589 /* Now we search backwards for a non-mark glyph */
1590 hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
1591 skippy_iter.reset (buffer->idx, 1);
1592 skippy_iter.set_lookup_props (LookupFlag::IgnoreMarks);
1593 if (!skippy_iter.prev ()) return_trace (false);
1595 /* Checking that matched glyph is actually a ligature by GDEF is too strong; disabled */
1596 //if (!_hb_glyph_info_is_ligature (&buffer->info[skippy_iter.idx])) { return_trace (false); }
1598 unsigned int j = skippy_iter.idx;
1599 unsigned int lig_index = (this+ligatureCoverage).get_coverage (buffer->info[j].codepoint);
1600 if (lig_index == NOT_COVERED) return_trace (false);
1602 const LigatureArray& lig_array = this+ligatureArray;
1603 const LigatureAttach& lig_attach = lig_array[lig_index];
1605 /* Find component to attach to */
1606 unsigned int comp_count = lig_attach.rows;
1607 if (unlikely (!comp_count)) return_trace (false);
1609 /* We must now check whether the ligature ID of the current mark glyph
1610 * is identical to the ligature ID of the found ligature. If yes, we
1611 * can directly use the component index. If not, we attach the mark
1612 * glyph to the last component of the ligature. */
1613 unsigned int comp_index;
1614 unsigned int lig_id = _hb_glyph_info_get_lig_id (&buffer->info[j]);
1615 unsigned int mark_id = _hb_glyph_info_get_lig_id (&buffer->cur());
1616 unsigned int mark_comp = _hb_glyph_info_get_lig_comp (&buffer->cur());
1617 if (lig_id && lig_id == mark_id && mark_comp > 0)
/* Clamp to the attach table's row count; lig_comp is 1-based, rows 0-based. */
1618 comp_index = hb_min (comp_count, _hb_glyph_info_get_lig_comp (&buffer->cur())) - 1;
1620 comp_index = comp_count - 1;
1622 return_trace ((this+markArray).apply (c, mark_index, comp_index, lig_attach, classCount, j));
/* Subsetting of MarkLigPos is not implemented; dropping the subtable. */
1625 bool subset (hb_subset_context_t *c) const
1627 TRACE_SUBSET (this);
1629 return_trace (false);
1632 bool sanitize (hb_sanitize_context_t *c) const
1634 TRACE_SANITIZE (this);
1635 return_trace (c->check_struct (this) &&
1636 markCoverage.sanitize (c, this) &&
1637 ligatureCoverage.sanitize (c, this) &&
1638 markArray.sanitize (c, this) &&
/* Each LigatureAttach matrix has classCount columns, hence the extra arg. */
1639 ligatureArray.sanitize (c, this, (unsigned int) classCount));
1643 HBUINT16 format; /* Format identifier--format = 1 */
1645 markCoverage; /* Offset to Mark Coverage table--from
1646 * beginning of MarkLigPos subtable */
1648 ligatureCoverage; /* Offset to Ligature Coverage
1649 * table--from beginning of MarkLigPos
1651 HBUINT16 classCount; /* Number of defined mark classes */
1653 markArray; /* Offset to MarkArray table--from
1654 * beginning of MarkLigPos subtable */
1655 OffsetTo<LigatureArray>
1656 ligatureArray; /* Offset to LigatureArray table--from
1657 * beginning of MarkLigPos subtable */
1659 DEFINE_SIZE_STATIC (12);
/* MarkLigPos: format-dispatching wrapper (only format 1 is defined). */
1664 template <typename context_t, typename ...Ts>
1665 typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
1667 TRACE_DISPATCH (this, u.format);
1668 if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
1670 case 1: return_trace (c->dispatch (u.format1, hb_forward<Ts> (ds)...));
1671 default:return_trace (c->default_return_value ());
1677 HBUINT16 format; /* Format identifier */
1678 MarkLigPosFormat1 format1;
1683 typedef AnchorMatrix Mark2Array; /* mark2-major--
1684 * in order of Mark2Coverage Index--,
1686 * ordered by class--zero-based. */
/* MarkMarkPos subtable format 1: attaches a mark (mark1) to a preceding
 * mark (mark2) when both belong to the same base or ligature component. */
1688 struct MarkMarkPosFormat1
1690 bool intersects (const hb_set_t *glyphs) const
1691 { return (this+mark1Coverage).intersects (glyphs) &&
1692 (this+mark2Coverage).intersects (glyphs); }
1694 void collect_glyphs (hb_collect_glyphs_context_t *c) const
1696 if (unlikely (!(this+mark1Coverage).add_coverage (c->input))) return;
1697 if (unlikely (!(this+mark2Coverage).add_coverage (c->input))) return;
1700 const Coverage &get_coverage () const { return this+mark1Coverage; }
1702 bool apply (hb_ot_apply_context_t *c) const
1705 hb_buffer_t *buffer = c->buffer;
1706 unsigned int mark1_index = (this+mark1Coverage).get_coverage (buffer->cur().codepoint);
/* likely(): most glyphs are not marks covered by this subtable. */
1707 if (likely (mark1_index == NOT_COVERED)) return_trace (false);
1709 /* now we search backwards for a suitable mark glyph until a non-mark glyph */
1710 hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
1711 skippy_iter.reset (buffer->idx, 1);
/* Clear IgnoreFlags so intervening marks are visible to the search. */
1712 skippy_iter.set_lookup_props (c->lookup_props & ~LookupFlag::IgnoreFlags);
1713 if (!skippy_iter.prev ()) return_trace (false);
1715 if (!_hb_glyph_info_is_mark (&buffer->info[skippy_iter.idx])) { return_trace (false); }
1717 unsigned int j = skippy_iter.idx;
/* Compare ligature ids/components to decide whether the two marks
 * belong together (same base, or same ligature component). */
1719 unsigned int id1 = _hb_glyph_info_get_lig_id (&buffer->cur());
1720 unsigned int id2 = _hb_glyph_info_get_lig_id (&buffer->info[j]);
1721 unsigned int comp1 = _hb_glyph_info_get_lig_comp (&buffer->cur());
1722 unsigned int comp2 = _hb_glyph_info_get_lig_comp (&buffer->info[j]);
1724 if (likely (id1 == id2)) {
1725 if (id1 == 0) /* Marks belonging to the same base. */
1727 else if (comp1 == comp2) /* Marks belonging to the same ligature component. */
1730 /* If ligature ids don't match, it may be the case that one of the marks
1731 * itself is a ligature. In which case match. */
1732 if ((id1 > 0 && !comp1) || (id2 > 0 && !comp2))
/* Didn't match; the two marks are not attachable to each other. */
1737 return_trace (false);
1740 unsigned int mark2_index = (this+mark2Coverage).get_coverage (buffer->info[j].codepoint);
1741 if (mark2_index == NOT_COVERED) return_trace (false);
1743 return_trace ((this+mark1Array).apply (c, mark1_index, mark2_index, this+mark2Array, classCount, j));
/* Subsetting of MarkMarkPos is not implemented; dropping the subtable. */
1746 bool subset (hb_subset_context_t *c) const
1748 TRACE_SUBSET (this);
1750 return_trace (false);
1753 bool sanitize (hb_sanitize_context_t *c) const
1755 TRACE_SANITIZE (this);
1756 return_trace (c->check_struct (this) &&
1757 mark1Coverage.sanitize (c, this) &&
1758 mark2Coverage.sanitize (c, this) &&
1759 mark1Array.sanitize (c, this) &&
/* Mark2Array's column count comes from classCount, hence the extra arg. */
1760 mark2Array.sanitize (c, this, (unsigned int) classCount));
1764 HBUINT16 format; /* Format identifier--format = 1 */
1766 mark1Coverage; /* Offset to Combining Mark1 Coverage
1767 * table--from beginning of MarkMarkPos
1770 mark2Coverage; /* Offset to Combining Mark2 Coverage
1771 * table--from beginning of MarkMarkPos
1773 HBUINT16 classCount; /* Number of defined mark classes */
1775 mark1Array; /* Offset to Mark1Array table--from
1776 * beginning of MarkMarkPos subtable */
1777 OffsetTo<Mark2Array>
1778 mark2Array; /* Offset to Mark2Array table--from
1779 * beginning of MarkMarkPos subtable */
1781 DEFINE_SIZE_STATIC (12);
/* MarkMarkPos: format-dispatching wrapper (only format 1 is defined). */
1786 template <typename context_t, typename ...Ts>
1787 typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
1789 TRACE_DISPATCH (this, u.format);
1790 if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
1792 case 1: return_trace (c->dispatch (u.format1, hb_forward<Ts> (ds)...));
1793 default:return_trace (c->default_return_value ());
1799 HBUINT16 format; /* Format identifier */
1800 MarkMarkPosFormat1 format1;
/* Contextual lookup types share their implementation with GSUB. */
1805 struct ContextPos : Context {};
1807 struct ChainContextPos : ChainContext {};
/* Extension positioning: 32-bit offset wrapper around another subtable. */
1809 struct ExtensionPos : Extension<ExtensionPos>
1811 typedef struct PosLookupSubTable SubTable;
/* Union of all GPOS subtable types; dispatch is keyed by the lookup's
 * type (taken from the enclosing Lookup, not stored in the subtable). */
1821 struct PosLookupSubTable
1823 friend struct Lookup;
1824 friend struct PosLookup;
1838 template <typename context_t, typename ...Ts>
1839 typename context_t::return_t dispatch (context_t *c, unsigned int lookup_type, Ts&&... ds) const
1841 TRACE_DISPATCH (this, lookup_type);
1842 switch (lookup_type) {
1843 case Single: return_trace (u.single.dispatch (c, hb_forward<Ts> (ds)...));
1844 case Pair: return_trace (u.pair.dispatch (c, hb_forward<Ts> (ds)...));
1845 case Cursive: return_trace (u.cursive.dispatch (c, hb_forward<Ts> (ds)...));
1846 case MarkBase: return_trace (u.markBase.dispatch (c, hb_forward<Ts> (ds)...));
1847 case MarkLig: return_trace (u.markLig.dispatch (c, hb_forward<Ts> (ds)...));
1848 case MarkMark: return_trace (u.markMark.dispatch (c, hb_forward<Ts> (ds)...));
1849 case Context: return_trace (u.context.dispatch (c, hb_forward<Ts> (ds)...));
1850 case ChainContext: return_trace (u.chainContext.dispatch (c, hb_forward<Ts> (ds)...));
1851 case Extension: return_trace (u.extension.dispatch (c, hb_forward<Ts> (ds)...));
1852 default: return_trace (c->default_return_value ());
1861 MarkBasePos markBase;
1863 MarkMarkPos markMark;
1865 ChainContextPos chainContext;
1866 ExtensionPos extension;
/* Minimum size 0: the union is only read through the dispatched member. */
1869 DEFINE_SIZE_MIN (0);
/* A GPOS lookup: thin typed wrapper over the generic Lookup machinery,
 * binding it to PosLookupSubTable. */
1873 struct PosLookup : Lookup
1875 typedef struct PosLookupSubTable SubTable;
1877 const SubTable& get_subtable (unsigned int i) const
1878 { return Lookup::get_subtable<SubTable> (i); }
1880 bool is_reverse () const
1885 bool apply (hb_ot_apply_context_t *c) const
1888 return_trace (dispatch (c));
1891 bool intersects (const hb_set_t *glyphs) const
1893 hb_intersects_context_t c (glyphs);
1894 return dispatch (&c);
1897 hb_collect_glyphs_context_t::return_t collect_glyphs (hb_collect_glyphs_context_t *c) const
1898 { return dispatch (c); }
/* Adds every glyph covered by any subtable of this lookup to `glyphs`. */
1900 template <typename set_t>
1901 void add_coverage (set_t *glyphs) const
1903 hb_add_coverage_context_t<set_t> c (glyphs);
/* Recursion hooks used by contextual lookups; bodies are out-of-class
 * (below) because they need the full GPOS accelerator type. */
1907 HB_INTERNAL static bool apply_recurse_func (hb_ot_apply_context_t *c, unsigned int lookup_index);
1909 template <typename context_t>
1910 static typename context_t::return_t dispatch_recurse_func (context_t *c, unsigned int lookup_index);
1912 template <typename context_t, typename ...Ts>
1913 typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
1914 { return Lookup::dispatch<SubTable> (c, hb_forward<Ts> (ds)...); }
1916 bool subset (hb_subset_context_t *c) const
1917 { return Lookup::subset<SubTable> (c); }
1919 bool sanitize (hb_sanitize_context_t *c) const
1920 { return Lookup::sanitize<SubTable> (c); }
1924 * GPOS -- Glyph Positioning
1925 * https://docs.microsoft.com/en-us/typography/opentype/spec/gpos
/* Top-level GPOS table: shares header/script/feature/lookup-list logic
 * with GSUB via GSUBGPOS; only the lookup payload type differs. */
1928 struct GPOS : GSUBGPOS
1930 static constexpr hb_tag_t tableTag = HB_OT_TAG_GPOS;
1932 const PosLookup& get_lookup (unsigned int i) const
1933 { return CastR<PosLookup> (GSUBGPOS::get_lookup (i)); }
/* Buffer-wide positioning lifecycle hooks; implementations below. */
1935 static inline void position_start (hb_font_t *font, hb_buffer_t *buffer);
1936 static inline void position_finish_advances (hb_font_t *font, hb_buffer_t *buffer);
1937 static inline void position_finish_offsets (hb_font_t *font, hb_buffer_t *buffer);
1939 bool subset (hb_subset_context_t *c) const
1940 { return GSUBGPOS::subset<PosLookup> (c); }
1942 bool sanitize (hb_sanitize_context_t *c) const
1943 { return GSUBGPOS::sanitize<PosLookup> (c); }
/* Rejects known-broken fonts by blob identity; defined out-of-line. */
1945 HB_INTERNAL bool is_blacklisted (hb_blob_t *blob,
1946 hb_face_t *face) const;
1948 typedef GSUBGPOS::accelerator_t<GPOS> accelerator_t;
/* Walks an existing cursive attachment chain starting at pos[i] and
 * reverses each link, so the subtree formerly rooted elsewhere can be
 * re-rooted onto new_parent.  Recursion stops at non-cursive links,
 * unattached glyphs, or when new_parent itself is reached. */
1953 reverse_cursive_minor_offset (hb_glyph_position_t *pos, unsigned int i, hb_direction_t direction, unsigned int new_parent)
1955 int chain = pos[i].attach_chain(), type = pos[i].attach_type();
1956 if (likely (!chain || 0 == (type & ATTACH_TYPE_CURSIVE)))
/* Detach i before recursing, to break cycles. */
1959 pos[i].attach_chain() = 0;
1961 unsigned int j = (int) i + chain;
1963 /* Stop if we see new parent in the chain. */
1964 if (j == new_parent)
/* Reverse the rest of the chain first, then flip this link. */
1967 reverse_cursive_minor_offset (pos, j, direction, new_parent);
/* Only the cross-axis offset is stored on the child; negate it when the
 * parent/child roles swap. */
1969 if (HB_DIRECTION_IS_HORIZONTAL (direction))
1970 pos[j].y_offset = -pos[i].y_offset;
1972 pos[j].x_offset = -pos[i].x_offset;
/* j now attaches to i: same magnitude, opposite direction. */
1974 pos[j].attach_chain() = -chain;
1975 pos[j].attach_type() = type;
1978 propagate_attachment_offsets (hb_glyph_position_t *pos,
1981 hb_direction_t direction)
1983 /* Adjusts offsets of attached glyphs (both cursive and mark) to accumulate
1984 * offset of glyph they are attached to. */
1985 int chain = pos[i].attach_chain(), type = pos[i].attach_type();
1986 if (likely (!chain))
/* Clear the chain before recursing so each glyph is processed once
 * (and cycles cannot recurse forever). */
1989 pos[i].attach_chain() = 0;
/* j = the glyph i is attached to (chain is a signed delta). */
1991 unsigned int j = (int) i + chain;
1993 if (unlikely (j >= len))
/* Resolve the parent's offsets first (depth-first up the tree). */
1996 propagate_attachment_offsets (pos, len, j, direction);
/* An attachment is exactly one of mark or cursive, never both. */
1998 assert (!!(type & ATTACH_TYPE_MARK) ^ !!(type & ATTACH_TYPE_CURSIVE));
2000 if (type & ATTACH_TYPE_CURSIVE)
/* Cursive: only the cross-axis offset is inherited from the parent. */
2002 if (HB_DIRECTION_IS_HORIZONTAL (direction))
2003 pos[i].y_offset += pos[j].y_offset;
2005 pos[i].x_offset += pos[j].x_offset;
2007 else /*if (type & ATTACH_TYPE_MARK)*/
/* Mark: inherit both offsets, then cancel out the advances of the
 * glyphs between the attachment point and the mark, since the mark's
 * anchor was computed relative to its base's origin. */
2009 pos[i].x_offset += pos[j].x_offset;
2010 pos[i].y_offset += pos[j].y_offset;
2013 if (HB_DIRECTION_IS_FORWARD (direction))
2014 for (unsigned int k = j; k < i; k++) {
2015 pos[i].x_offset -= pos[k].x_advance;
2016 pos[i].y_offset -= pos[k].y_advance;
/* Backward direction: the advances between run the other way. */
2019 for (unsigned int k = j + 1; k < i + 1; k++) {
2020 pos[i].x_offset += pos[k].x_advance;
2021 pos[i].y_offset += pos[k].y_advance;
/* Resets the per-glyph attachment bookkeeping vars before GPOS runs. */
2027 GPOS::position_start (hb_font_t *font HB_UNUSED, hb_buffer_t *buffer)
2029 unsigned int count = buffer->len;
2030 for (unsigned int i = 0; i < count; i++)
2031 buffer->pos[i].attach_chain() = buffer->pos[i].attach_type() = 0;
/* No advance-phase work is needed after GPOS; kept as a lifecycle hook. */
2035 GPOS::position_finish_advances (hb_font_t *font HB_UNUSED, hb_buffer_t *buffer HB_UNUSED)
2037 //_hb_buffer_assert_gsubgpos_vars (buffer);
/* After all lookups ran, resolves attachment chains into final offsets. */
2041 GPOS::position_finish_offsets (hb_font_t *font HB_UNUSED, hb_buffer_t *buffer)
2043 _hb_buffer_assert_gsubgpos_vars (buffer);
2046 hb_glyph_position_t *pos = hb_buffer_get_glyph_positions (buffer, &len);
2047 hb_direction_t direction = buffer->props.direction;
2049 /* Handle attachments */
/* Scratch flag set by the cursive/mark lookups lets us skip the pass
 * entirely when nothing attached. */
2050 if (buffer->scratch_flags & HB_BUFFER_SCRATCH_FLAG_HAS_GPOS_ATTACHMENT)
2051 for (unsigned int i = 0; i < len; i++)
2052 propagate_attachment_offsets (pos, len, i, direction);
/* Concrete accelerator alias used by the shaping entry points. */
2056 struct GPOS_accelerator_t : GPOS::accelerator_t {};
2059 /* Out-of-class implementation for methods recursing */
2061 #ifndef HB_NO_OT_LAYOUT
/* Generic recursion: fetch the target lookup from the cached GPOS table
 * and dispatch the same context into it. */
2062 template <typename context_t>
2063 /*static*/ inline typename context_t::return_t PosLookup::dispatch_recurse_func (context_t *c, unsigned int lookup_index)
2065 const PosLookup &l = c->face->table.GPOS.get_relaxed ()->table->get_lookup (lookup_index);
2066 return l.dispatch (c);
/* Apply-time recursion: like above, but must swap in the nested lookup's
 * index and props around the dispatch, then restore the caller's. */
2068 /*static*/ inline bool PosLookup::apply_recurse_func (hb_ot_apply_context_t *c, unsigned int lookup_index)
2070 const PosLookup &l = c->face->table.GPOS.get_relaxed ()->table->get_lookup (lookup_index);
2071 unsigned int saved_lookup_props = c->lookup_props;
2072 unsigned int saved_lookup_index = c->lookup_index;
2073 c->set_lookup_index (lookup_index);
2074 c->set_lookup_props (l.get_props ());
2075 bool ret = l.dispatch (c);
2076 c->set_lookup_index (saved_lookup_index);
2077 c->set_lookup_props (saved_lookup_props);
2083 } /* namespace OT */
2086 #endif /* HB_OT_LAYOUT_GPOS_TABLE_HH */