2 * Copyright © 2007,2008,2009 Red Hat, Inc.
3 * Copyright © 2010,2012 Google, Inc.
5 * This is part of HarfBuzz, a text shaping library.
7 * Permission is hereby granted, without written agreement and without
8 * license or royalty fees, to use, copy, modify, and distribute this
9 * software and its documentation for any purpose, provided that the
10 * above copyright notice and the following two paragraphs appear in
11 * all copies of this software.
13 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
14 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
15 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
16 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
19 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
20 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
21 * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
22 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
23 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
25 * Red Hat Author(s): Behdad Esfahbod
26 * Google Author(s): Behdad Esfahbod
29 #ifndef HB_OT_LAYOUT_COMMON_HH
30 #define HB_OT_LAYOUT_COMMON_HH
33 #include "hb-ot-layout.hh"
34 #include "hb-open-type.hh"
36 #include "hb-bimap.hh"
38 #include "OT/Layout/Common/Coverage.hh"
39 #include "OT/Layout/types.hh"
41 // TODO(garretrieger): cleanup these after migration.
42 using OT::Layout::Common::Coverage;
43 using OT::Layout::Common::RangeRecord;
44 using OT::Layout::SmallTypes;
45 using OT::Layout::MediumTypes;
50 template<typename Iterator>
51 static inline bool ClassDef_serialize (hb_serialize_context_t *c,
54 static bool ClassDef_remap_and_serialize (
55 hb_serialize_context_t *c,
56 const hb_set_t &klasses,
58 hb_sorted_vector_t<hb_codepoint_pair_t> &glyph_and_klass, /* IN/OUT */
59 hb_map_t *klass_map /*IN/OUT*/);
61 struct hb_collect_feature_substitutes_with_var_context_t
63 const hb_map_t *axes_index_tag_map;
64 const hb_hashmap_t<hb_tag_t, Triple> *axes_location;
65 hb_hashmap_t<unsigned, hb::shared_ptr<hb_set_t>> *record_cond_idx_map;
66 hb_hashmap_t<unsigned, const Feature*> *feature_substitutes_map;
67 bool& insert_catch_all_feature_variation_record;
69 // not stored in subset_plan
70 hb_set_t *feature_indices;
72 bool variation_applied;
74 unsigned cur_record_idx;
75 hb_hashmap_t<hb::shared_ptr<hb_map_t>, unsigned> *conditionset_map;
78 struct hb_prune_langsys_context_t
80 hb_prune_langsys_context_t (const void *table_,
81 hb_hashmap_t<unsigned, hb::unique_ptr<hb_set_t>> *script_langsys_map_,
82 const hb_map_t *duplicate_feature_map_,
83 hb_set_t *new_collected_feature_indexes_)
85 script_langsys_map (script_langsys_map_),
86 duplicate_feature_map (duplicate_feature_map_),
87 new_feature_indexes (new_collected_feature_indexes_),
88 script_count (0),langsys_feature_count (0) {}
91 { return script_count++ < HB_MAX_SCRIPTS; }
93 bool visitLangsys (unsigned feature_count)
95 langsys_feature_count += feature_count;
96 return langsys_feature_count < HB_MAX_LANGSYS_FEATURE_COUNT;
101 hb_hashmap_t<unsigned, hb::unique_ptr<hb_set_t>> *script_langsys_map;
102 const hb_map_t *duplicate_feature_map;
103 hb_set_t *new_feature_indexes;
106 unsigned script_count;
107 unsigned langsys_feature_count;
110 struct hb_subset_layout_context_t :
111 hb_dispatch_context_t<hb_subset_layout_context_t, hb_empty_t, HB_DEBUG_SUBSET>
113 const char *get_name () { return "SUBSET_LAYOUT"; }
114 static return_t default_return_value () { return hb_empty_t (); }
118 return script_count++ < HB_MAX_SCRIPTS;
123 return langsys_count++ < HB_MAX_LANGSYS;
126 bool visitFeatureIndex (int count)
128 feature_index_count += count;
129 return feature_index_count < HB_MAX_FEATURE_INDICES;
132 bool visitLookupIndex()
134 lookup_index_count++;
135 return lookup_index_count < HB_MAX_LOOKUP_VISIT_COUNT;
138 hb_subset_context_t *subset_context;
139 const hb_tag_t table_tag;
140 const hb_map_t *lookup_index_map;
141 const hb_hashmap_t<unsigned, hb::unique_ptr<hb_set_t>> *script_langsys_map;
142 const hb_map_t *feature_index_map;
143 const hb_hashmap_t<unsigned, const Feature*> *feature_substitutes_map;
144 hb_hashmap_t<unsigned, hb::shared_ptr<hb_set_t>> *feature_record_cond_idx_map;
146 unsigned cur_script_index;
147 unsigned cur_feature_var_record_idx;
149 hb_subset_layout_context_t (hb_subset_context_t *c_,
153 cur_script_index (0xFFFFu),
154 cur_feature_var_record_idx (0u),
157 feature_index_count (0),
158 lookup_index_count (0)
160 if (tag_ == HB_OT_TAG_GSUB)
162 lookup_index_map = &c_->plan->gsub_lookups;
163 script_langsys_map = &c_->plan->gsub_langsys;
164 feature_index_map = &c_->plan->gsub_features;
165 feature_substitutes_map = &c_->plan->gsub_feature_substitutes_map;
166 feature_record_cond_idx_map = c_->plan->user_axes_location.is_empty () ? nullptr : &c_->plan->gsub_feature_record_cond_idx_map;
170 lookup_index_map = &c_->plan->gpos_lookups;
171 script_langsys_map = &c_->plan->gpos_langsys;
172 feature_index_map = &c_->plan->gpos_features;
173 feature_substitutes_map = &c_->plan->gpos_feature_substitutes_map;
174 feature_record_cond_idx_map = c_->plan->user_axes_location.is_empty () ? nullptr : &c_->plan->gpos_feature_record_cond_idx_map;
179 unsigned script_count;
180 unsigned langsys_count;
181 unsigned feature_index_count;
182 unsigned lookup_index_count;
185 struct VariationStore;
186 struct hb_collect_variation_indices_context_t :
187 hb_dispatch_context_t<hb_collect_variation_indices_context_t>
189 template <typename T>
190 return_t dispatch (const T &obj) { obj.collect_variation_indices (this); return hb_empty_t (); }
191 static return_t default_return_value () { return hb_empty_t (); }
193 hb_set_t *layout_variation_indices;
194 const hb_set_t *glyph_set;
195 const hb_map_t *gpos_lookups;
197 hb_collect_variation_indices_context_t (hb_set_t *layout_variation_indices_,
198 const hb_set_t *glyph_set_,
199 const hb_map_t *gpos_lookups_) :
200 layout_variation_indices (layout_variation_indices_),
201 glyph_set (glyph_set_),
202 gpos_lookups (gpos_lookups_) {}
205 template<typename OutputArray>
206 struct subset_offset_array_t
208 subset_offset_array_t (hb_subset_context_t *subset_context_,
210 const void *base_) : subset_context (subset_context_),
211 out (out_), base (base_) {}
213 template <typename T>
214 bool operator () (T&& offset)
216 auto snap = subset_context->serializer->snapshot ();
217 auto *o = out.serialize_append (subset_context->serializer);
218 if (unlikely (!o)) return false;
219 bool ret = o->serialize_subset (subset_context, offset, base);
223 subset_context->serializer->revert (snap);
229 hb_subset_context_t *subset_context;
235 template<typename OutputArray, typename Arg>
236 struct subset_offset_array_arg_t
238 subset_offset_array_arg_t (hb_subset_context_t *subset_context_,
241 Arg &&arg_) : subset_context (subset_context_), out (out_),
242 base (base_), arg (arg_) {}
244 template <typename T>
245 bool operator () (T&& offset)
247 auto snap = subset_context->serializer->snapshot ();
248 auto *o = out.serialize_append (subset_context->serializer);
249 if (unlikely (!o)) return false;
250 bool ret = o->serialize_subset (subset_context, offset, base, arg);
254 subset_context->serializer->revert (snap);
260 hb_subset_context_t *subset_context;
267 * Helper to subset an array of offsets. Subsets the thing pointed to by each offset
268 * and discards the offset in the array if the subset operation results in an empty
273 template<typename OutputArray>
274 subset_offset_array_t<OutputArray>
275 operator () (hb_subset_context_t *subset_context, OutputArray& out,
276 const void *base) const
277 { return subset_offset_array_t<OutputArray> (subset_context, out, base); }
279 /* Variant with one extra argument passed to serialize_subset */
280 template<typename OutputArray, typename Arg>
281 subset_offset_array_arg_t<OutputArray, Arg>
282 operator () (hb_subset_context_t *subset_context, OutputArray& out,
283 const void *base, Arg &&arg) const
284 { return subset_offset_array_arg_t<OutputArray, Arg> (subset_context, out, base, arg); }
286 HB_FUNCOBJ (subset_offset_array);
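/* Illustrative usage (a sketch based on call sites further down in this file):
 * the functor is applied to a filtered iterator of offsets via dagger syntax, e.g.
 *
 *   + hb_iter (get_subtables<TSubTable> ())
 *   | hb_filter (...)
 *   | hb_apply (subset_offset_array (c, out->get_subtables<TSubTable> (), this))
 *   ;
 */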
288 template<typename OutputArray>
289 struct subset_record_array_t
291 subset_record_array_t (hb_subset_layout_context_t *c_, OutputArray* out_,
292 const void *base_) : subset_layout_context (c_),
293 out (out_), base (base_) {}
295 template <typename T>
297 operator () (T&& record)
299 auto snap = subset_layout_context->subset_context->serializer->snapshot ();
300 bool ret = record.subset (subset_layout_context, base);
301 if (!ret) subset_layout_context->subset_context->serializer->revert (snap);
306 hb_subset_layout_context_t *subset_layout_context;
311 template<typename OutputArray, typename Arg>
312 struct subset_record_array_arg_t
314 subset_record_array_arg_t (hb_subset_layout_context_t *c_, OutputArray* out_,
316 Arg &&arg_) : subset_layout_context (c_),
317 out (out_), base (base_), arg (arg_) {}
319 template <typename T>
321 operator () (T&& record)
323 auto snap = subset_layout_context->subset_context->serializer->snapshot ();
324 bool ret = record.subset (subset_layout_context, base, arg);
325 if (!ret) subset_layout_context->subset_context->serializer->revert (snap);
330 hb_subset_layout_context_t *subset_layout_context;
337 * Helper to subset a RecordList/record array. Subsets each Record in the array and
338 * discards the record if the subset operation returns false.
342 template<typename OutputArray>
343 subset_record_array_t<OutputArray>
344 operator () (hb_subset_layout_context_t *c, OutputArray* out,
345 const void *base) const
346 { return subset_record_array_t<OutputArray> (c, out, base); }
348 /* Variant with one extra argument passed to subset */
349 template<typename OutputArray, typename Arg>
350 subset_record_array_arg_t<OutputArray, Arg>
351 operator () (hb_subset_layout_context_t *c, OutputArray* out,
352 const void *base, Arg &&arg) const
353 { return subset_record_array_arg_t<OutputArray, Arg> (c, out, base, arg); }
355 HB_FUNCOBJ (subset_record_array);
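/* Illustrative usage (a sketch in the spirit of RecordListOf<Type>::subset below):
 * each record in the array is subset in place while iterating, e.g.
 *
 *   + hb_iter (*this)
 *   | hb_apply (subset_record_array (l, out, this))
 *   ;
 */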
358 template<typename OutputArray>
359 struct serialize_math_record_array_t
361 serialize_math_record_array_t (hb_serialize_context_t *serialize_context_,
363 const void *base_) : serialize_context (serialize_context_),
364 out (out_), base (base_) {}
366 template <typename T>
367 bool operator () (T&& record)
369 if (!serialize_context->copy (record, base)) return false;
375 hb_serialize_context_t *serialize_context;
381 * Helper to serialize an array of MATH records.
385 template<typename OutputArray>
386 serialize_math_record_array_t<OutputArray>
387 operator () (hb_serialize_context_t *serialize_context, OutputArray& out,
388 const void *base) const
389 { return serialize_math_record_array_t<OutputArray> (serialize_context, out, base); }
392 HB_FUNCOBJ (serialize_math_record_array);
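/* Illustrative usage (a hedged sketch; the real call sites live in the MATH table code,
 * and "records" / "out->records" are placeholder names):
 *
 *   + hb_iter (records)
 *   | hb_apply (serialize_math_record_array (c, out->records, this))
 *   ;
 *
 * Each record is copied via serialize_context->copy (record, base); the base pointer
 * lets the record resolve its own offsets while being copied. */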
396 * OpenType Layout Common Table Formats
402 * Script, ScriptList, LangSys, Feature, FeatureList, Lookup, LookupList
405 struct IndexArray : Array16Of<Index>
407 bool intersects (const hb_map_t *indexes) const
408 { return hb_any (*this, indexes); }
410 template <typename Iterator,
411 hb_requires (hb_is_iterator (Iterator))>
412 void serialize (hb_serialize_context_t *c,
413 hb_subset_layout_context_t *l,
417 if (unlikely (!c->extend_min ((*this)))) return;
419 for (const auto _ : it)
421 if (!l->visitLookupIndex()) break;
430 unsigned int get_indexes (unsigned int start_offset,
431 unsigned int *_count /* IN/OUT */,
432 unsigned int *_indexes /* OUT */) const
436 + this->as_array ().sub_array (start_offset, _count)
437 | hb_sink (hb_array (_indexes, *_count))
443 void add_indexes_to (hb_set_t* output /* OUT */) const
445 output->add_array (as_array ());
450 /* https://docs.microsoft.com/en-us/typography/opentype/spec/features_pt#size */
451 struct FeatureParamsSize
453 bool sanitize (hb_sanitize_context_t *c) const
455 TRACE_SANITIZE (this);
456 if (unlikely (!c->check_struct (this))) return_trace (false);
458 /* This subtable has some "history", if you will. Some earlier versions of
459 * Adobe tools calculated the offset of the FeatureParams subtable from the
460 * beginning of the FeatureList table! Now, that is dealt with in the
461 * Feature implementation. But we still need to be able to tell junk from
462 * real data. Note: We don't check that the nameID actually exists.
464 * Read Roberts wrote on 9/15/06 on opentype-list@indx.co.uk :
466 * Yes, it is correct that a new version of the AFDKO (version 2.0) will be
467 * coming out soon, and that the makeotf program will build a font with a
468 * 'size' feature that is correct by the specification.
470 * The specification for this feature tag is in the "OpenType Layout Tag
471 * Registry". You can see a copy of this at:
472 * https://docs.microsoft.com/en-us/typography/opentype/spec/features_pt#tag-size
474 * Here is one set of rules to determine if the 'size' feature is built
475 * correctly, or as by the older versions of MakeOTF. You may be able to do
478 * Assume that the offset to the size feature is according to specification,
479 * and make the following value checks. If it fails, assume the size
480 * feature is calculated as versions of MakeOTF before the AFDKO 2.0 built it.
481 * If this fails, reject the 'size' feature. The older makeOTF's calculated the
482 * offset from the beginning of the FeatureList table, rather than from the
483 * beginning of the 'size' Feature table.
485 * If "design size" == 0:
488 * Else if ("subfamily identifier" == 0 and
489 * "range start" == 0 and
490 * "range end" == 0 and
492 * "menu name ID" == 0)
493 * passes check: this is the format used when there is a design size
494 * specified, but there is no recommended size range.
496 * Else if ("design size" < "range start" or
497 * "design size" > "range end" or
498 * "range end" <= "range start" or
499 * "menu name ID" < 256 or
500 * "menu name ID" > 32767 or
501 * menu name ID is not a name ID which is actually in the name table)
508 return_trace (false);
509 else if (subfamilyID == 0 &&
510 subfamilyNameID == 0 &&
514 else if (designSize < rangeStart ||
515 designSize > rangeEnd ||
516 subfamilyNameID < 256 ||
517 subfamilyNameID > 32767)
518 return_trace (false);
523 void collect_name_ids (hb_set_t *nameids_to_retain /* OUT */) const
524 { nameids_to_retain->add (subfamilyNameID); }
526 bool subset (hb_subset_context_t *c) const
529 return_trace ((bool) c->serializer->embed (*this));
532 HBUINT16 designSize; /* Represents the design size in 720/inch
533 * units (decipoints). The design size entry
534 * must be non-zero. When there is a design
535 * size but no recommended size range, the
536 * rest of the array will consist of zeros. */
537 HBUINT16 subfamilyID; /* Has no independent meaning, but serves
538 * as an identifier that associates fonts
539 * in a subfamily. All fonts which share a
540 * Preferred or Font Family name and which
541 * differ only by size range shall have the
542 * same subfamily value, and no fonts which
543 * differ in weight or style shall have the
544 * same subfamily value. If this value is
545 * zero, the remaining fields in the array
546 * will be ignored. */
547 NameID subfamilyNameID;/* If the preceding value is non-zero, this
548 * value must be set in the range 256 - 32767
549 * (inclusive). It records the value of a
550 * field in the name table, which must
551 * contain English-language strings encoded
552 * in Windows Unicode and Macintosh Roman,
553 * and may contain additional strings
554 * localized to other scripts and languages.
555 * Each of these strings is the name an
556 * application should use, in combination
557 * with the family name, to represent the
558 * subfamily in a menu. Applications will
559 * choose the appropriate version based on
560 * their selection criteria. */
561 HBUINT16 rangeStart; /* Small end of the recommended usage range
562 * (exclusive), stored in 720/inch units
564 HBUINT16 rangeEnd; /* Large end of the recommended usage range
565 (inclusive), stored in 720/inch units
568 DEFINE_SIZE_STATIC (10);
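/* Worked example (illustrative values, not from any particular font): a face designed
 * for 10pt use with a recommended range of 9pt-12pt would store, in decipoints:
 *   designSize      = 100   (10pt * 10)
 *   subfamilyID     = 1
 *   subfamilyNameID = 256   (a name-table entry such as "Caption")
 *   rangeStart      = 90    (exclusive small end)
 *   rangeEnd        = 120   (inclusive large end)
 * These values pass the sanitize() checks above: designSize is non-zero, it lies within
 * the recommended range, and subfamilyNameID is within 256..32767. */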
571 /* https://docs.microsoft.com/en-us/typography/opentype/spec/features_pt#ssxx */
572 struct FeatureParamsStylisticSet
574 bool sanitize (hb_sanitize_context_t *c) const
576 TRACE_SANITIZE (this);
577 /* Right now minorVersion is at zero, which means any table supports
578 * the uiNameID field. */
579 return_trace (c->check_struct (this));
582 void collect_name_ids (hb_set_t *nameids_to_retain /* OUT */) const
583 { nameids_to_retain->add (uiNameID); }
585 bool subset (hb_subset_context_t *c) const
588 return_trace ((bool) c->serializer->embed (*this));
591 HBUINT16 version; /* (set to 0): This corresponds to a “minor”
592 * version number. Additional data may be
593 * added to the end of this Feature Parameters
594 * table in the future. */
596 NameID uiNameID; /* The 'name' table name ID that specifies a
597 * string (or strings, for multiple languages)
598 * for a user-interface label for this
599 * feature. The values of uiLabelNameId and
600 * sampleTextNameId are expected to be in the
601 * font-specific name ID range (256-32767),
602 * though that is not a requirement in this
603 * Feature Parameters specification. The
604 * user-interface label for the feature can
605 * be provided in multiple languages. An
606 * English string should be included as a
607 * fallback. The string should be kept to a
608 * minimal length to fit comfortably with
609 * different application interfaces. */
611 DEFINE_SIZE_STATIC (4);
614 /* https://docs.microsoft.com/en-us/typography/opentype/spec/features_ae#cv01-cv99 */
615 struct FeatureParamsCharacterVariants
618 get_characters (unsigned start_offset, unsigned *char_count, hb_codepoint_t *chars) const
622 + characters.as_array ().sub_array (start_offset, char_count)
623 | hb_sink (hb_array (chars, *char_count))
626 return characters.len;
629 unsigned get_size () const
630 { return min_size + characters.len * HBUINT24::static_size; }
632 void collect_name_ids (hb_set_t *nameids_to_retain /* OUT */) const
634 if (featUILableNameID) nameids_to_retain->add (featUILableNameID);
635 if (featUITooltipTextNameID) nameids_to_retain->add (featUITooltipTextNameID);
636 if (sampleTextNameID) nameids_to_retain->add (sampleTextNameID);
638 if (!firstParamUILabelNameID || !numNamedParameters || numNamedParameters >= 0x7FFF)
641 unsigned last_name_id = (unsigned) firstParamUILabelNameID + (unsigned) numNamedParameters - 1;
642 if (last_name_id >= 256 && last_name_id <= 32767)
643 nameids_to_retain->add_range (firstParamUILabelNameID, last_name_id);
646 bool subset (hb_subset_context_t *c) const
649 return_trace ((bool) c->serializer->embed (*this));
652 bool sanitize (hb_sanitize_context_t *c) const
654 TRACE_SANITIZE (this);
655 return_trace (c->check_struct (this) &&
656 characters.sanitize (c));
659 HBUINT16 format; /* Format number is set to 0. */
660 NameID featUILableNameID; /* The ‘name’ table name ID that
661 * specifies a string (or strings,
662 * for multiple languages) for a
663 * user-interface label for this
664 * feature. (May be NULL.) */
665 NameID featUITooltipTextNameID;/* The ‘name’ table name ID that
666 * specifies a string (or strings,
667 * for multiple languages) that an
668 * application can use for tooltip
669 * text for this feature. (May be
671 NameID sampleTextNameID; /* The ‘name’ table name ID that
672 * specifies sample text that
673 * illustrates the effect of this
674 * feature. (May be NULL.) */
675 HBUINT16 numNamedParameters; /* Number of named parameters. (May
677 NameID firstParamUILabelNameID;/* The first ‘name’ table name ID
678 * used to specify strings for
679 * user-interface labels for the
680 * feature parameters. (Must be zero
681 * if numParameters is zero.) */
683 characters; /* Array of the Unicode Scalar Value
684 * of the characters for which this
685 * feature provides glyph variants.
688 DEFINE_SIZE_ARRAY (14, characters);
693 bool sanitize (hb_sanitize_context_t *c, hb_tag_t tag) const
695 #ifdef HB_NO_LAYOUT_FEATURE_PARAMS
698 TRACE_SANITIZE (this);
699 if (tag == HB_TAG ('s','i','z','e'))
700 return_trace (u.size.sanitize (c));
701 if ((tag & 0xFFFF0000u) == HB_TAG ('s','s','\0','\0')) /* ssXX */
702 return_trace (u.stylisticSet.sanitize (c));
703 if ((tag & 0xFFFF0000u) == HB_TAG ('c','v','\0','\0')) /* cvXX */
704 return_trace (u.characterVariants.sanitize (c));
708 void collect_name_ids (hb_tag_t tag, hb_set_t *nameids_to_retain /* OUT */) const
710 #ifdef HB_NO_LAYOUT_FEATURE_PARAMS
713 if (tag == HB_TAG ('s','i','z','e'))
714 return (u.size.collect_name_ids (nameids_to_retain));
715 if ((tag & 0xFFFF0000u) == HB_TAG ('s','s','\0','\0')) /* ssXX */
716 return (u.stylisticSet.collect_name_ids (nameids_to_retain));
717 if ((tag & 0xFFFF0000u) == HB_TAG ('c','v','\0','\0')) /* cvXX */
718 return (u.characterVariants.collect_name_ids (nameids_to_retain));
721 bool subset (hb_subset_context_t *c, const Tag* tag) const
724 if (!tag) return_trace (false);
725 if (*tag == HB_TAG ('s','i','z','e'))
726 return_trace (u.size.subset (c));
727 if ((*tag & 0xFFFF0000u) == HB_TAG ('s','s','\0','\0')) /* ssXX */
728 return_trace (u.stylisticSet.subset (c));
729 if ((*tag & 0xFFFF0000u) == HB_TAG ('c','v','\0','\0')) /* cvXX */
730 return_trace (u.characterVariants.subset (c));
731 return_trace (false);
734 #ifndef HB_NO_LAYOUT_FEATURE_PARAMS
735 const FeatureParamsSize& get_size_params (hb_tag_t tag) const
737 if (tag == HB_TAG ('s','i','z','e'))
739 return Null (FeatureParamsSize);
741 const FeatureParamsStylisticSet& get_stylistic_set_params (hb_tag_t tag) const
743 if ((tag & 0xFFFF0000u) == HB_TAG ('s','s','\0','\0')) /* ssXX */
744 return u.stylisticSet;
745 return Null (FeatureParamsStylisticSet);
747 const FeatureParamsCharacterVariants& get_character_variants_params (hb_tag_t tag) const
749 if ((tag & 0xFFFF0000u) == HB_TAG ('c','v','\0','\0')) /* cvXX */
750 return u.characterVariants;
751 return Null (FeatureParamsCharacterVariants);
757 FeatureParamsSize size;
758 FeatureParamsStylisticSet stylisticSet;
759 FeatureParamsCharacterVariants characterVariants;
765 struct Record_sanitize_closure_t {
767 const void *list_base;
772 unsigned int get_lookup_count () const
773 { return lookupIndex.len; }
774 hb_tag_t get_lookup_index (unsigned int i) const
775 { return lookupIndex[i]; }
776 unsigned int get_lookup_indexes (unsigned int start_index,
777 unsigned int *lookup_count /* IN/OUT */,
778 unsigned int *lookup_tags /* OUT */) const
779 { return lookupIndex.get_indexes (start_index, lookup_count, lookup_tags); }
780 void add_lookup_indexes_to (hb_set_t *lookup_indexes) const
781 { lookupIndex.add_indexes_to (lookup_indexes); }
783 const FeatureParams &get_feature_params () const
784 { return this+featureParams; }
786 bool intersects_lookup_indexes (const hb_map_t *lookup_indexes) const
787 { return lookupIndex.intersects (lookup_indexes); }
789 void collect_name_ids (hb_tag_t tag, hb_set_t *nameids_to_retain /* OUT */) const
792 get_feature_params ().collect_name_ids (tag, nameids_to_retain);
795 bool subset (hb_subset_context_t *c,
796 hb_subset_layout_context_t *l,
797 const Tag *tag = nullptr) const
800 auto *out = c->serializer->start_embed (*this);
801 if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
803 out->featureParams.serialize_subset (c, featureParams, this, tag);
806 + hb_iter (lookupIndex)
807 | hb_filter (l->lookup_index_map)
808 | hb_map (l->lookup_index_map)
811 out->lookupIndex.serialize (c->serializer, l, it);
812 // The decision to keep or drop this feature is already made before we get here
813 // so always retain it.
817 bool sanitize (hb_sanitize_context_t *c,
818 const Record_sanitize_closure_t *closure = nullptr) const
820 TRACE_SANITIZE (this);
821 if (unlikely (!(c->check_struct (this) && lookupIndex.sanitize (c))))
822 return_trace (false);
824 /* Some earlier versions of Adobe tools calculated the offset of the
825 * FeatureParams subtable from the beginning of the FeatureList table!
827 * If sanitizing "failed" for the FeatureParams subtable, try it with the
828 * alternative location. We can tell sanitize "failed" if the old value
829 * of the offset was non-zero but it is zeroed now.
831 * Only do this for the 'size' feature, since at the time of the faulty
832 * Adobe tools, only the 'size' feature had FeatureParams defined.
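   *
   * For example (illustrative numbers): if this Feature starts 0x40 bytes after the
   * FeatureList and the stored offset is 0x48, the rebased offset tried below is
   * 0x48 - 0x40 = 0x08, i.e. the same FeatureParams data addressed relative to the
   * Feature table itself.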
835 if (likely (featureParams.is_null ()))
838 unsigned int orig_offset = featureParams;
839 if (unlikely (!featureParams.sanitize (c, this, closure ? closure->tag : HB_TAG_NONE)))
840 return_trace (false);
842 if (featureParams == 0 && closure &&
843 closure->tag == HB_TAG ('s','i','z','e') &&
844 closure->list_base && closure->list_base < this)
846 unsigned int new_offset_int = orig_offset -
847 (((char *) this) - ((char *) closure->list_base));
849 Offset16To<FeatureParams> new_offset;
850 /* Check that it would not overflow. */
851 new_offset = new_offset_int;
852 if (new_offset == new_offset_int &&
853 c->try_set (&featureParams, new_offset_int) &&
854 !featureParams.sanitize (c, this, closure ? closure->tag : HB_TAG_NONE))
855 return_trace (false);
861 Offset16To<FeatureParams>
862 featureParams; /* Offset to Feature Parameters table (if one
863 * has been defined for the feature), relative
864 * to the beginning of the Feature Table; = Null
866 IndexArray lookupIndex; /* Array of LookupList indices */
868 DEFINE_SIZE_ARRAY_SIZED (4, lookupIndex);
871 template <typename Type>
874 int cmp (hb_tag_t a) const { return tag.cmp (a); }
876 bool subset (hb_subset_layout_context_t *c, const void *base, const void *f_sub = nullptr) const
879 auto *out = c->subset_context->serializer->embed (this);
880 if (unlikely (!out)) return_trace (false);
883 return_trace (out->offset.serialize_subset (c->subset_context, offset, base, c, &tag));
885 const Feature& f = *reinterpret_cast<const Feature *> (f_sub);
886 auto *s = c->subset_context->serializer;
890 bool ret = f.subset (c->subset_context, c, &tag);
892 s->add_link (out->offset, s->pop_pack ());
899 bool sanitize (hb_sanitize_context_t *c, const void *base) const
901 TRACE_SANITIZE (this);
902 const Record_sanitize_closure_t closure = {tag, base};
903 return_trace (c->check_struct (this) && offset.sanitize (c, base, &closure));
906 Tag tag; /* 4-byte Tag identifier */
908 offset; /* Offset from beginning of object holding
911 DEFINE_SIZE_STATIC (6);
914 template <typename Type>
915 struct RecordArrayOf : SortedArray16Of<Record<Type>>
917 const Offset16To<Type>& get_offset (unsigned int i) const
918 { return (*this)[i].offset; }
919 Offset16To<Type>& get_offset (unsigned int i)
920 { return (*this)[i].offset; }
921 const Tag& get_tag (unsigned int i) const
922 { return (*this)[i].tag; }
923 unsigned int get_tags (unsigned int start_offset,
924 unsigned int *record_count /* IN/OUT */,
925 hb_tag_t *record_tags /* OUT */) const
929 + this->as_array ().sub_array (start_offset, record_count)
930 | hb_map (&Record<Type>::tag)
931 | hb_sink (hb_array (record_tags, *record_count))
936 bool find_index (hb_tag_t tag, unsigned int *index) const
938 return this->bfind (tag, index, HB_NOT_FOUND_STORE, Index::NOT_FOUND_INDEX);
942 template <typename Type>
943 struct RecordListOf : RecordArrayOf<Type>
945 const Type& operator [] (unsigned int i) const
946 { return this+this->get_offset (i); }
948 bool subset (hb_subset_context_t *c,
949 hb_subset_layout_context_t *l) const
952 auto *out = c->serializer->start_embed (*this);
953 if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
956 | hb_apply (subset_record_array (l, out, this))
961 bool sanitize (hb_sanitize_context_t *c) const
963 TRACE_SANITIZE (this);
964 return_trace (RecordArrayOf<Type>::sanitize (c, this));
968 struct RecordListOfFeature : RecordListOf<Feature>
970 bool subset (hb_subset_context_t *c,
971 hb_subset_layout_context_t *l) const
974 auto *out = c->serializer->start_embed (*this);
975 if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
977 + hb_enumerate (*this)
978 | hb_filter (l->feature_index_map, hb_first)
979 | hb_apply ([l, out, this] (const hb_pair_t<unsigned, const Record<Feature>&>& _)
981 const Feature *f_sub = nullptr;
982 const Feature **f = nullptr;
983 if (l->feature_substitutes_map->has (_.first, &f))
986 subset_record_array (l, out, this, f_sub) (_.second);
994 typedef RecordListOf<Feature> FeatureList;
999 unsigned int get_feature_count () const
1000 { return featureIndex.len; }
1001 hb_tag_t get_feature_index (unsigned int i) const
1002 { return featureIndex[i]; }
1003 unsigned int get_feature_indexes (unsigned int start_offset,
1004 unsigned int *feature_count /* IN/OUT */,
1005 unsigned int *feature_indexes /* OUT */) const
1006 { return featureIndex.get_indexes (start_offset, feature_count, feature_indexes); }
1007 void add_feature_indexes_to (hb_set_t *feature_indexes) const
1008 { featureIndex.add_indexes_to (feature_indexes); }
1010 bool has_required_feature () const { return reqFeatureIndex != 0xFFFFu; }
1011 unsigned int get_required_feature_index () const
1013 if (reqFeatureIndex == 0xFFFFu)
1014 return Index::NOT_FOUND_INDEX;
1015 return reqFeatureIndex;
1018 LangSys* copy (hb_serialize_context_t *c) const
1020 TRACE_SERIALIZE (this);
1021 return_trace (c->embed (*this));
1024 bool compare (const LangSys& o, const hb_map_t *feature_index_map) const
1026 if (reqFeatureIndex != o.reqFeatureIndex)
1030 + hb_iter (featureIndex)
1031 | hb_filter (feature_index_map)
1032 | hb_map (feature_index_map)
1036 + hb_iter (o.featureIndex)
1037 | hb_filter (feature_index_map)
1038 | hb_map (feature_index_map)
1041 for (; iter && o_iter; iter++, o_iter++)
1044 unsigned b = *o_iter;
1045 if (a != b) return false;
1048 if (iter || o_iter) return false;
1053 void collect_features (hb_prune_langsys_context_t *c) const
1055 if (!has_required_feature () && !get_feature_count ()) return;
1056 if (has_required_feature () &&
1057 c->duplicate_feature_map->has (reqFeatureIndex))
1058 c->new_feature_indexes->add (get_required_feature_index ());
1060 + hb_iter (featureIndex)
1061 | hb_filter (c->duplicate_feature_map)
1062 | hb_sink (c->new_feature_indexes)
1066 bool subset (hb_subset_context_t *c,
1067 hb_subset_layout_context_t *l,
1068 const Tag *tag = nullptr) const
1070 TRACE_SUBSET (this);
1071 auto *out = c->serializer->start_embed (*this);
1072 if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
1075 out->reqFeatureIndex = l->feature_index_map->has (reqFeatureIndex, &v) ? *v : 0xFFFFu;
1077 if (!l->visitFeatureIndex (featureIndex.len))
1078 return_trace (false);
1081 + hb_iter (featureIndex)
1082 | hb_filter (l->feature_index_map)
1083 | hb_map (l->feature_index_map)
1086 bool ret = bool (it);
1087 out->featureIndex.serialize (c->serializer, l, it);
1091 bool sanitize (hb_sanitize_context_t *c,
1092 const Record_sanitize_closure_t * = nullptr) const
1094 TRACE_SANITIZE (this);
1095 return_trace (c->check_struct (this) && featureIndex.sanitize (c));
1098 Offset16 lookupOrderZ; /* = Null (reserved for an offset to a
1099 * reordering table) */
1100 HBUINT16 reqFeatureIndex;/* Index of a feature required for this
1101 * language system--if no required features
1103 IndexArray featureIndex; /* Array of indices into the FeatureList */
1105 DEFINE_SIZE_ARRAY_SIZED (6, featureIndex);
1107 DECLARE_NULL_NAMESPACE_BYTES (OT, LangSys);
1111 unsigned int get_lang_sys_count () const
1112 { return langSys.len; }
1113 const Tag& get_lang_sys_tag (unsigned int i) const
1114 { return langSys.get_tag (i); }
1115 unsigned int get_lang_sys_tags (unsigned int start_offset,
1116 unsigned int *lang_sys_count /* IN/OUT */,
1117 hb_tag_t *lang_sys_tags /* OUT */) const
1118 { return langSys.get_tags (start_offset, lang_sys_count, lang_sys_tags); }
1119 const LangSys& get_lang_sys (unsigned int i) const
1121 if (i == Index::NOT_FOUND_INDEX) return get_default_lang_sys ();
1122 return this+langSys[i].offset;
1124 bool find_lang_sys_index (hb_tag_t tag, unsigned int *index) const
1125 { return langSys.find_index (tag, index); }
1127 bool has_default_lang_sys () const { return defaultLangSys != 0; }
1128 const LangSys& get_default_lang_sys () const { return this+defaultLangSys; }
1130 void prune_langsys (hb_prune_langsys_context_t *c,
1131 unsigned script_index) const
1133 if (!has_default_lang_sys () && !get_lang_sys_count ()) return;
1134 if (!c->visitScript ()) return;
1136 if (!c->script_langsys_map->has (script_index))
1138 if (unlikely (!c->script_langsys_map->set (script_index, hb::unique_ptr<hb_set_t> {hb_set_create ()})))
1142 if (has_default_lang_sys ())
1144 //only collect features from non-redundant langsys
1145 const LangSys& d = get_default_lang_sys ();
1146 if (c->visitLangsys (d.get_feature_count ())) {
1147 d.collect_features (c);
1150 for (auto _ : + hb_enumerate (langSys))
1152 const LangSys& l = this+_.second.offset;
1153 if (!c->visitLangsys (l.get_feature_count ())) continue;
1154 if (l.compare (d, c->duplicate_feature_map)) continue;
1156 l.collect_features (c);
1157 c->script_langsys_map->get (script_index)->add (_.first);
1162 for (auto _ : + hb_enumerate (langSys))
1164 const LangSys& l = this+_.second.offset;
1165 if (!c->visitLangsys (l.get_feature_count ())) continue;
1166 l.collect_features (c);
1167 c->script_langsys_map->get (script_index)->add (_.first);
1172 bool subset (hb_subset_context_t *c,
1173 hb_subset_layout_context_t *l,
1174 const Tag *tag) const
1176 TRACE_SUBSET (this);
1177 if (!l->visitScript ()) return_trace (false);
1178 if (tag && !c->plan->layout_scripts.has (*tag))
1181 auto *out = c->serializer->start_embed (*this);
1182 if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
1184 bool defaultLang = false;
1185 if (has_default_lang_sys ())
1187 c->serializer->push ();
1188 const LangSys& ls = this+defaultLangSys;
1189 bool ret = ls.subset (c, l);
1190 if (!ret && tag && *tag != HB_TAG ('D', 'F', 'L', 'T'))
1192 c->serializer->pop_discard ();
1193 out->defaultLangSys = 0;
1197 c->serializer->add_link (out->defaultLangSys, c->serializer->pop_pack ());
1202 const hb_set_t *active_langsys = l->script_langsys_map->get (l->cur_script_index);
1205 + hb_enumerate (langSys)
1206 | hb_filter (active_langsys, hb_first)
1207 | hb_map (hb_second)
1208 | hb_filter ([=] (const Record<LangSys>& record) {return l->visitLangSys (); })
1209 | hb_apply (subset_record_array (l, &(out->langSys), this))
1213 return_trace (bool (out->langSys.len) || defaultLang || l->table_tag == HB_OT_TAG_GSUB);
1216 bool sanitize (hb_sanitize_context_t *c,
1217 const Record_sanitize_closure_t * = nullptr) const
1219 TRACE_SANITIZE (this);
1220 return_trace (defaultLangSys.sanitize (c, this) && langSys.sanitize (c, this));
1225 defaultLangSys; /* Offset to DefaultLangSys table--from
1226 * beginning of Script table--may be Null */
1227 RecordArrayOf<LangSys>
1228 langSys; /* Array of LangSysRecords--listed
1229 * alphabetically by LangSysTag */
1231 DEFINE_SIZE_ARRAY_SIZED (4, langSys);
1234 struct RecordListOfScript : RecordListOf<Script>
1236 bool subset (hb_subset_context_t *c,
1237 hb_subset_layout_context_t *l) const
1239 TRACE_SUBSET (this);
1240 auto *out = c->serializer->start_embed (*this);
1241 if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
1243 for (auto _ : + hb_enumerate (*this))
1245 auto snap = c->serializer->snapshot ();
1246 l->cur_script_index = _.first;
1247 bool ret = _.second.subset (l, this);
1248 if (!ret) c->serializer->revert (snap);
1252 return_trace (true);
1256 typedef RecordListOfScript ScriptList;
1260 struct LookupFlag : HBUINT16
1263 RightToLeft = 0x0001u,
1264 IgnoreBaseGlyphs = 0x0002u,
1265 IgnoreLigatures = 0x0004u,
1266 IgnoreMarks = 0x0008u,
1267 IgnoreFlags = 0x000Eu,
1268 UseMarkFilteringSet = 0x0010u,
1270 MarkAttachmentType = 0xFF00u
1273 DEFINE_SIZE_STATIC (2);
1276 } /* namespace OT */
1277 /* This has to be outside the namespace. */
1278 HB_MARK_AS_FLAG_T (OT::LookupFlag::Flags);
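/* Illustrative use of the flags (a sketch, not code from this library): since the enum is
 * marked as a flag type above, values may be OR-ed together, e.g.
 *
 *   uint16_t flags = OT::LookupFlag::IgnoreMarks | OT::LookupFlag::RightToLeft;
 *
 * IgnoreFlags (0x000E) is simply IgnoreBaseGlyphs | IgnoreLigatures | IgnoreMarks, and the
 * MarkAttachmentType byte (mask 0xFF00) holds a GDEF mark-attachment class: when non-zero,
 * marks of any other attachment class are skipped by the lookup. */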
1283 unsigned int get_subtable_count () const { return subTable.len; }
1285 template <typename TSubTable>
1286 const Array16OfOffset16To<TSubTable>& get_subtables () const
1287 { return reinterpret_cast<const Array16OfOffset16To<TSubTable> &> (subTable); }
1288 template <typename TSubTable>
1289 Array16OfOffset16To<TSubTable>& get_subtables ()
1290 { return reinterpret_cast<Array16OfOffset16To<TSubTable> &> (subTable); }
1292 template <typename TSubTable>
1293 const TSubTable& get_subtable (unsigned int i) const
1294 { return this+get_subtables<TSubTable> ()[i]; }
1295 template <typename TSubTable>
1296 TSubTable& get_subtable (unsigned int i)
1297 { return this+get_subtables<TSubTable> ()[i]; }
1299 unsigned int get_size () const
1301 const HBUINT16 &markFilteringSet = StructAfter<const HBUINT16> (subTable);
1302 if (lookupFlag & LookupFlag::UseMarkFilteringSet)
1303 return (const char *) &StructAfter<const char> (markFilteringSet) - (const char *) this;
1304 return (const char *) &markFilteringSet - (const char *) this;
1307 unsigned int get_type () const { return lookupType; }
1309 /* lookup_props is a 32-bit integer where the lower 16 bits are the LookupFlag and
1310 * the upper 16 bits are the mark-filtering-set index, if the lookup uses one.
1311 * Not to be confused with glyph_props, which is very similar. */
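  /* Illustrative packing (example values): a lookup with lookupFlag = 0x0010
   * (UseMarkFilteringSet) and markFilteringSet = 2 yields
   * lookup_props = (2 << 16) | 0x0010 = 0x00020010u. */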
1312 uint32_t get_props () const
1314 unsigned int flag = lookupFlag;
1315 if (unlikely (flag & LookupFlag::UseMarkFilteringSet))
1317 const HBUINT16 &markFilteringSet = StructAfter<HBUINT16> (subTable);
1318 flag += (markFilteringSet << 16);
1323 template <typename TSubTable, typename context_t, typename ...Ts>
1324 typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
1326 unsigned int lookup_type = get_type ();
1327 TRACE_DISPATCH (this, lookup_type);
1328 unsigned int count = get_subtable_count ();
1329 for (unsigned int i = 0; i < count; i++) {
1330 typename context_t::return_t r = get_subtable<TSubTable> (i).dispatch (c, lookup_type, std::forward<Ts> (ds)...);
1331 if (c->stop_sublookup_iteration (r))
1334 return_trace (c->default_return_value ());
1337 bool serialize (hb_serialize_context_t *c,
1338 unsigned int lookup_type,
1339 uint32_t lookup_props,
1340 unsigned int num_subtables)
1342 TRACE_SERIALIZE (this);
1343 if (unlikely (!c->extend_min (this))) return_trace (false);
1344 lookupType = lookup_type;
1345 lookupFlag = lookup_props & 0xFFFFu;
1346 if (unlikely (!subTable.serialize (c, num_subtables))) return_trace (false);
1347 if (lookupFlag & LookupFlag::UseMarkFilteringSet)
1349 if (unlikely (!c->extend (this))) return_trace (false);
1350 HBUINT16 &markFilteringSet = StructAfter<HBUINT16> (subTable);
1351 markFilteringSet = lookup_props >> 16;
1353 return_trace (true);
1356 template <typename TSubTable>
1357 bool subset (hb_subset_context_t *c) const
1359 TRACE_SUBSET (this);
1360 auto *out = c->serializer->start_embed (*this);
1361 if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
1362 out->lookupType = lookupType;
1363 out->lookupFlag = lookupFlag;
1365 const hb_set_t *glyphset = c->plan->glyphset_gsub ();
1366 unsigned int lookup_type = get_type ();
1367 + hb_iter (get_subtables <TSubTable> ())
1368 | hb_filter ([this, glyphset, lookup_type] (const Offset16To<TSubTable> &_) { return (this+_).intersects (glyphset, lookup_type); })
1369 | hb_apply (subset_offset_array (c, out->get_subtables<TSubTable> (), this, lookup_type))
1372 if (lookupFlag & LookupFlag::UseMarkFilteringSet)
1374 if (unlikely (!c->serializer->extend (out))) return_trace (false);
1375 const HBUINT16 &markFilteringSet = StructAfter<HBUINT16> (subTable);
1376 HBUINT16 &outMarkFilteringSet = StructAfter<HBUINT16> (out->subTable);
1377 outMarkFilteringSet = markFilteringSet;
1380 // Always keep the lookup even if it's empty. The rest of layout subsetting depends on lookup
1381 // indices being consistent with those computed during planning. So if an empty lookup is
1382 // discarded during the subset phase it will invalidate all subsequent lookup indices.
1383 // Generally we shouldn't end up with an empty lookup as we pre-prune them during the planning
1384 // phase, but it can happen in rare cases such as when during closure subtable is considered
1385 // degenerate (see: https://github.com/harfbuzz/harfbuzz/issues/3853)
1386 return_trace (true);
1389 template <typename TSubTable>
1390 bool sanitize (hb_sanitize_context_t *c) const
1392 TRACE_SANITIZE (this);
1393 if (!(c->check_struct (this) && subTable.sanitize (c))) return_trace (false);
1395 unsigned subtables = get_subtable_count ();
1396 if (unlikely (!c->visit_subtables (subtables))) return_trace (false);
1398 if (lookupFlag & LookupFlag::UseMarkFilteringSet)
1400 const HBUINT16 &markFilteringSet = StructAfter<HBUINT16> (subTable);
1401 if (!markFilteringSet.sanitize (c)) return_trace (false);
1404 if (unlikely (!get_subtables<TSubTable> ().sanitize (c, this, get_type ())))
1405 return_trace (false);
1407 if (unlikely (get_type () == TSubTable::Extension && !c->get_edit_count ()))
1409 /* The spec says all subtables of an Extension lookup should
1410 * have the same type, which shall not be the Extension type
1411 * itself (but we already checked for that).
1412 * This is especially important if one has a reverse type!
1414 * We only do this if sanitizer edit_count is zero. Otherwise,
1415 * some of the subtables might have become insane after they
1416 * were sanity-checked by the edits of subsequent subtables.
1417 * https://bugs.chromium.org/p/chromium/issues/detail?id=960331
1419 unsigned int type = get_subtable<TSubTable> (0).u.extension.get_type ();
1420 for (unsigned int i = 1; i < subtables; i++)
1421 if (get_subtable<TSubTable> (i).u.extension.get_type () != type)
1422 return_trace (false);
1424 return_trace (true);
1428 HBUINT16 lookupType; /* Different enumerations for GSUB and GPOS */
1429 HBUINT16 lookupFlag; /* Lookup qualifiers */
1431 subTable; /* Array of SubTables */
1432 /*HBUINT16 markFilteringSetX[HB_VAR_ARRAY];*//* Index (base 0) into GDEF mark glyph sets
1433 * structure. This field is only present if bit
1434 * UseMarkFilteringSet of lookup flags is set. */
1436 DEFINE_SIZE_ARRAY (6, subTable);
1439 template <typename Types>
1440 using LookupList = List16OfOffsetTo<Lookup, typename Types::HBUINT>;
1442 template <typename TLookup, typename OffsetType>
1443 struct LookupOffsetList : List16OfOffsetTo<TLookup, OffsetType>
1445 bool subset (hb_subset_context_t *c,
1446 hb_subset_layout_context_t *l) const
1448 TRACE_SUBSET (this);
1449 auto *out = c->serializer->start_embed (this);
1450 if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
1452 + hb_enumerate (*this)
1453 | hb_filter (l->lookup_index_map, hb_first)
1454 | hb_map (hb_second)
1455 | hb_apply (subset_offset_array (c, *out, this))
1457 return_trace (true);
1460 bool sanitize (hb_sanitize_context_t *c) const
1462 TRACE_SANITIZE (this);
1463 return_trace (List16OfOffset16To<TLookup>::sanitize (c, this));
1473 static bool ClassDef_remap_and_serialize (hb_serialize_context_t *c,
1474 const hb_set_t &klasses,
1475 bool use_class_zero,
1476 hb_sorted_vector_t<hb_codepoint_pair_t> &glyph_and_klass, /* IN/OUT */
1477 hb_map_t *klass_map /*IN/OUT*/)
1480 return ClassDef_serialize (c, glyph_and_klass.iter ());
1482 /* Any glyph not assigned a class value falls into Class zero (0);
1483 * if any glyph is assigned to class 0, the remapping must start with 0->0. */
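  /* Illustrative remapping (example values): with use_class_zero == false and original
   * classes {5, 9}, klass_map becomes 0->0, 5->1, 9->2, and the loop below rewrites each
   * glyph's class value in glyph_and_klass through that map. */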
1484 if (!use_class_zero)
1485 klass_map->set (0, 0);
1487 unsigned idx = klass_map->has (0) ? 1 : 0;
1488 for (const unsigned k: klasses)
1490 if (klass_map->has (k)) continue;
1491 klass_map->set (k, idx);
1496 for (unsigned i = 0; i < glyph_and_klass.length; i++)
1498 hb_codepoint_t klass = glyph_and_klass[i].second;
1499 glyph_and_klass[i].second = klass_map->get (klass);
1502 c->propagate_error (glyph_and_klass, klasses);
1503 return ClassDef_serialize (c, glyph_and_klass.iter ());
1507 * Class Definition Table
1510 template <typename Types>
1511 struct ClassDefFormat1_3
1513 friend struct ClassDef;
1516 unsigned int get_class (hb_codepoint_t glyph_id) const
1518 return classValue[(unsigned int) (glyph_id - startGlyph)];
1521 unsigned get_population () const
1523 return classValue.len;
1526 template<typename Iterator,
1527 hb_requires (hb_is_sorted_source_of (Iterator, hb_codepoint_t))>
1528 bool serialize (hb_serialize_context_t *c,
1531 TRACE_SERIALIZE (this);
1532 if (unlikely (!c->extend_min (this))) return_trace (false);
1539 return_trace (true);
1542 hb_codepoint_t glyph_min = (*it).first;
1543 hb_codepoint_t glyph_max = + it
1545 | hb_reduce (hb_max, 0u);
1546 unsigned glyph_count = glyph_max - glyph_min + 1;
1548 startGlyph = glyph_min;
1549 if (unlikely (!classValue.serialize (c, glyph_count))) return_trace (false);
1550 for (const hb_pair_t<hb_codepoint_t, uint32_t> gid_klass_pair : + it)
1552 unsigned idx = gid_klass_pair.first - glyph_min;
1553 classValue[idx] = gid_klass_pair.second;
1555 return_trace (true);
1558 bool subset (hb_subset_context_t *c,
1559 hb_map_t *klass_map = nullptr /*OUT*/,
1560 bool keep_empty_table = true,
1561 bool use_class_zero = true,
1562 const Coverage* glyph_filter = nullptr) const
1564 TRACE_SUBSET (this);
1565 const hb_map_t &glyph_map = c->plan->glyph_map_gsub;
1567 hb_sorted_vector_t<hb_codepoint_pair_t> glyph_and_klass;
1568 hb_set_t orig_klasses;
1570 hb_codepoint_t start = startGlyph;
1571 hb_codepoint_t end = start + classValue.len;
1573 for (const hb_codepoint_t gid : + hb_range (start, end))
1575 hb_codepoint_t new_gid = glyph_map[gid];
1576 if (new_gid == HB_MAP_VALUE_INVALID) continue;
1577 if (glyph_filter && !glyph_filter->has(gid)) continue;
1579 unsigned klass = classValue[gid - start];
1580 if (!klass) continue;
1582 glyph_and_klass.push (hb_pair (new_gid, klass));
1583 orig_klasses.add (klass);
1588 unsigned glyph_count = glyph_filter
1589 ? hb_len (hb_iter (glyph_map.keys()) | hb_filter (glyph_filter))
1590 : glyph_map.get_population ();
1591 use_class_zero = glyph_count <= glyph_and_klass.length;
1593 if (!ClassDef_remap_and_serialize (c->serializer,
1598 return_trace (false);
1599 return_trace (keep_empty_table || (bool) glyph_and_klass);
1602 bool sanitize (hb_sanitize_context_t *c) const
1604 TRACE_SANITIZE (this);
1605 return_trace (c->check_struct (this) && classValue.sanitize (c));
1608 unsigned cost () const { return 1; }
1610 template <typename set_t>
1611 bool collect_coverage (set_t *glyphs) const
1613 unsigned int start = 0;
1614 unsigned int count = classValue.len;
1615 for (unsigned int i = 0; i < count; i++)
1621 if (unlikely (!glyphs->add_range (startGlyph + start, startGlyph + i)))
1627 if (unlikely (!glyphs->add_range (startGlyph + start, startGlyph + count)))
1633 template <typename set_t>
1634 bool collect_class (set_t *glyphs, unsigned klass) const
1636 unsigned int count = classValue.len;
1637 for (unsigned int i = 0; i < count; i++)
1638 if (classValue[i] == klass) glyphs->add (startGlyph + i);
1642 bool intersects (const hb_set_t *glyphs) const
1644 hb_codepoint_t start = startGlyph;
1645 hb_codepoint_t end = startGlyph + classValue.len;
1646 for (hb_codepoint_t iter = startGlyph - 1;
1647 glyphs->next (&iter) && iter < end;)
1648 if (classValue[iter - start]) return true;
1651 bool intersects_class (const hb_set_t *glyphs, uint16_t klass) const
1653 unsigned int count = classValue.len;
1656 /* Match if there's any glyph that is not listed! */
1657 hb_codepoint_t g = HB_SET_VALUE_INVALID;
1658 if (!glyphs->next (&g)) return false;
1659 if (g < startGlyph) return true;
1660 g = startGlyph + count - 1;
1661 if (glyphs->next (&g)) return true;
1664 /* TODO Speed up, using set overlap first? */
1665 /* TODO(iter) Rewrite as dagger. */
1666 const HBUINT16 *arr = classValue.arrayZ;
1667 for (unsigned int i = 0; i < count; i++)
1668 if (arr[i] == klass && glyphs->has (startGlyph + i))
1673 void intersected_class_glyphs (const hb_set_t *glyphs, unsigned klass, hb_set_t *intersect_glyphs) const
1675 unsigned count = classValue.len;
1678 unsigned start_glyph = startGlyph;
1679 for (uint32_t g = HB_SET_VALUE_INVALID;
1680 glyphs->next (&g) && g < start_glyph;)
1681 intersect_glyphs->add (g);
1683 for (uint32_t g = startGlyph + count - 1;
1684 glyphs-> next (&g);)
1685 intersect_glyphs->add (g);
1690 for (unsigned i = 0; i < count; i++)
1691 if (classValue[i] == klass && glyphs->has (startGlyph + i))
1692 intersect_glyphs->add (startGlyph + i);
1695 /* The following implementation is faster asymptotically, but slower
1697 unsigned start_glyph = startGlyph;
1698 unsigned end_glyph = start_glyph + count;
1699 for (unsigned g = startGlyph - 1;
1700 glyphs->next (&g) && g < end_glyph;)
1701 if (classValue.arrayZ[g - start_glyph] == klass)
1702 intersect_glyphs->add (g);
1706 void intersected_classes (const hb_set_t *glyphs, hb_set_t *intersect_classes) const
1708 if (glyphs->is_empty ()) return;
1709 hb_codepoint_t end_glyph = startGlyph + classValue.len - 1;
1710 if (glyphs->get_min () < startGlyph ||
1711 glyphs->get_max () > end_glyph)
1712 intersect_classes->add (0);
1714 for (const auto& _ : + hb_enumerate (classValue))
1716 hb_codepoint_t g = startGlyph + _.first;
1717 if (glyphs->has (g))
1718 intersect_classes->add (_.second);
1723 HBUINT16 classFormat; /* Format identifier--format = 1 */
1724 typename Types::HBGlyphID
1725 startGlyph; /* First GlyphID of the classValueArray */
1726 typename Types::template ArrayOf<HBUINT16>
1727 classValue; /* Array of Class Values--one per GlyphID */
1729 DEFINE_SIZE_ARRAY (2 + 2 * Types::size, classValue);
1732 template <typename Types>
1733 struct ClassDefFormat2_4
1735 friend struct ClassDef;
1738 unsigned int get_class (hb_codepoint_t glyph_id) const
1740 return rangeRecord.bsearch (glyph_id).value;
1743 unsigned get_population () const
1745 typename Types::large_int ret = 0;
1746 for (const auto &r : rangeRecord)
1747 ret += r.get_population ();
1748 return ret > UINT_MAX ? UINT_MAX : (unsigned) ret;
1751 template<typename Iterator,
1752 hb_requires (hb_is_sorted_source_of (Iterator, hb_codepoint_t))>
1753 bool serialize (hb_serialize_context_t *c,
1756 TRACE_SERIALIZE (this);
1757 if (unlikely (!c->extend_min (this))) return_trace (false);
1762 rangeRecord.len = 0;
1763 return_trace (true);
1766 unsigned unsorted = false;
1767 unsigned num_ranges = 1;
1768 hb_codepoint_t prev_gid = (*it).first;
1769 unsigned prev_klass = (*it).second;
1771 RangeRecord<Types> range_rec;
1772 range_rec.first = prev_gid;
1773 range_rec.last = prev_gid;
1774 range_rec.value = prev_klass;
1776 auto *record = c->copy (range_rec);
1777 if (unlikely (!record)) return_trace (false);
1779 for (const auto gid_klass_pair : + (++it))
1781 hb_codepoint_t cur_gid = gid_klass_pair.first;
1782 unsigned cur_klass = gid_klass_pair.second;
1784 if (cur_gid != prev_gid + 1 ||
1785 cur_klass != prev_klass)
1788 if (unlikely (cur_gid < prev_gid))
1791 if (unlikely (!record)) break;
1792 record->last = prev_gid;
1795 range_rec.first = cur_gid;
1796 range_rec.last = cur_gid;
1797 range_rec.value = cur_klass;
1799 record = c->copy (range_rec);
1802 prev_klass = cur_klass;
1806 if (unlikely (c->in_error ())) return_trace (false);
1808 if (likely (record)) record->last = prev_gid;
1809 rangeRecord.len = num_ranges;
1811 if (unlikely (unsorted))
1812 rangeRecord.as_array ().qsort (RangeRecord<Types>::cmp_range);
1814 return_trace (true);
1817 bool subset (hb_subset_context_t *c,
1818 hb_map_t *klass_map = nullptr /*OUT*/,
1819 bool keep_empty_table = true,
1820 bool use_class_zero = true,
1821 const Coverage* glyph_filter = nullptr) const
1823 TRACE_SUBSET (this);
1824 const hb_map_t &glyph_map = c->plan->glyph_map_gsub;
1825 const hb_set_t &glyph_set = *c->plan->glyphset_gsub ();
1827 hb_sorted_vector_t<hb_codepoint_pair_t> glyph_and_klass;
1828 hb_set_t orig_klasses;
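    /* Pick the cheaper iteration order below: either walk the subset glyph set and call
     * get_class () (a bsearch over rangeRecord) per glyph, or walk every range record and
     * look each covered glyph up in glyph_map. The population comparison that follows
     * chooses between the two. */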
1830 if (glyph_set.get_population () * hb_bit_storage ((unsigned) rangeRecord.len) / 2
1831 < get_population ())
1833 for (hb_codepoint_t g : glyph_set)
1835 unsigned klass = get_class (g);
1836 if (!klass) continue;
1837 hb_codepoint_t new_gid = glyph_map[g];
1838 if (new_gid == HB_MAP_VALUE_INVALID) continue;
1839 if (glyph_filter && !glyph_filter->has (g)) continue;
1840 glyph_and_klass.push (hb_pair (new_gid, klass));
1841 orig_klasses.add (klass);
1846 unsigned num_source_glyphs = c->plan->source->get_num_glyphs ();
1847 for (auto &range : rangeRecord)
1849 unsigned klass = range.value;
1850 if (!klass) continue;
1851 hb_codepoint_t start = range.first;
1852 hb_codepoint_t end = hb_min (range.last + 1, num_source_glyphs);
1853 for (hb_codepoint_t g = start; g < end; g++)
1855 hb_codepoint_t new_gid = glyph_map[g];
1856 if (new_gid == HB_MAP_VALUE_INVALID) continue;
1857 if (glyph_filter && !glyph_filter->has (g)) continue;
1859 glyph_and_klass.push (hb_pair (new_gid, klass));
1860 orig_klasses.add (klass);
1865 const hb_set_t& glyphset = *c->plan->glyphset_gsub ();
1866 unsigned glyph_count = glyph_filter
1867 ? hb_len (hb_iter (glyphset) | hb_filter (glyph_filter))
1868 : glyph_map.get_population ();
1869 use_class_zero = use_class_zero && glyph_count <= glyph_and_klass.length;
1870 if (!ClassDef_remap_and_serialize (c->serializer,
1875 return_trace (false);
1876 return_trace (keep_empty_table || (bool) glyph_and_klass);
1879 bool sanitize (hb_sanitize_context_t *c) const
1881 TRACE_SANITIZE (this);
1882 return_trace (rangeRecord.sanitize (c));
1885 unsigned cost () const { return hb_bit_storage ((unsigned) rangeRecord.len); /* bsearch cost */ }
1887 template <typename set_t>
1888 bool collect_coverage (set_t *glyphs) const
1890 for (auto &range : rangeRecord)
1892 if (unlikely (!range.collect_coverage (glyphs)))
1897 template <typename set_t>
1898 bool collect_class (set_t *glyphs, unsigned int klass) const
1900 for (auto &range : rangeRecord)
1902 if (range.value == klass)
1903 if (unlikely (!range.collect_coverage (glyphs)))
1909 bool intersects (const hb_set_t *glyphs) const
1911 if (rangeRecord.len > glyphs->get_population () * hb_bit_storage ((unsigned) rangeRecord.len) / 2)
1913 for (auto g : *glyphs)
1919 return hb_any (+ hb_iter (rangeRecord)
1920 | hb_map ([glyphs] (const RangeRecord<Types> &range) { return range.intersects (*glyphs) && range.value; }));
1922 bool intersects_class (const hb_set_t *glyphs, uint16_t klass) const
1926 /* Match if there's any glyph that is not listed! */
1927 hb_codepoint_t g = HB_SET_VALUE_INVALID;
1928 hb_codepoint_t last = HB_SET_VALUE_INVALID;
1929 auto it = hb_iter (rangeRecord);
1930 for (auto &range : it)
1932 if (it->first == last + 1)
1938 if (!glyphs->next (&g))
1940 if (g < range.first)
1945 if (g != HB_SET_VALUE_INVALID && glyphs->next (&g))
1949 for (const auto &range : rangeRecord)
1950 if (range.value == klass && range.intersects (*glyphs))
1955 void intersected_class_glyphs (const hb_set_t *glyphs, unsigned klass, hb_set_t *intersect_glyphs) const
1959 hb_codepoint_t g = HB_SET_VALUE_INVALID;
1960 for (auto &range : rangeRecord)
1962 if (!glyphs->next (&g))
1964 while (g < range.first)
1966 intersect_glyphs->add (g);
1967 if (!glyphs->next (&g))
1972 while (glyphs->next (&g))
1973 intersect_glyphs->add (g);
1979 unsigned count = rangeRecord.len;
1980 if (count > glyphs->get_population () * hb_bit_storage (count) * 8)
1982 for (auto g : *glyphs)
1985 if (rangeRecord.as_array ().bfind (g, &i) &&
1986 rangeRecord.arrayZ[i].value == klass)
1987 intersect_glyphs->add (g);
1992 for (auto &range : rangeRecord)
1994 if (range.value != klass) continue;
1996 unsigned end = range.last + 1;
1997 for (hb_codepoint_t g = range.first - 1;
1998 glyphs->next (&g) && g < end;)
1999 intersect_glyphs->add (g);
2003 void intersected_classes (const hb_set_t *glyphs, hb_set_t *intersect_classes) const
2005 if (glyphs->is_empty ()) return;
2007 hb_codepoint_t g = HB_SET_VALUE_INVALID;
2008 for (auto &range : rangeRecord)
2010 if (!glyphs->next (&g))
2012 if (g < range.first)
2014 intersect_classes->add (0);
2019 if (g != HB_SET_VALUE_INVALID && glyphs->next (&g))
2020 intersect_classes->add (0);
2022 for (const auto& range : rangeRecord)
2023 if (range.intersects (*glyphs))
2024 intersect_classes->add (range.value);
2028 HBUINT16 classFormat; /* Format identifier--format = 2 */
2029 typename Types::template SortedArrayOf<RangeRecord<Types>>
2030 rangeRecord; /* Array of glyph ranges--ordered by
2033 DEFINE_SIZE_ARRAY (2 + Types::size, rangeRecord);
2038 /* Has interface. */
2039 unsigned operator [] (hb_codepoint_t k) const { return get (k); }
2040 bool has (hb_codepoint_t k) const { return (*this)[k]; }
2042 hb_codepoint_t operator () (hb_codepoint_t k) const { return get (k); }
2044 unsigned int get (hb_codepoint_t k) const { return get_class (k); }
2045 unsigned int get_class (hb_codepoint_t glyph_id) const
2048 case 1: return u.format1.get_class (glyph_id);
2049 case 2: return u.format2.get_class (glyph_id);
2050 #ifndef HB_NO_BEYOND_64K
2051 case 3: return u.format3.get_class (glyph_id);
2052 case 4: return u.format4.get_class (glyph_id);
2058 unsigned get_population () const
2061 case 1: return u.format1.get_population ();
2062 case 2: return u.format2.get_population ();
2063 #ifndef HB_NO_BEYOND_64K
2064 case 3: return u.format3.get_population ();
2065 case 4: return u.format4.get_population ();
2067 default:return NOT_COVERED;
2071 template<typename Iterator,
2072 hb_requires (hb_is_sorted_source_of (Iterator, hb_codepoint_t))>
2073 bool serialize (hb_serialize_context_t *c, Iterator it_with_class_zero)
2075 TRACE_SERIALIZE (this);
2076 if (unlikely (!c->extend_min (this))) return_trace (false);
2078 auto it = + it_with_class_zero | hb_filter (hb_second);
2080 unsigned format = 2;
2081 hb_codepoint_t glyph_max = 0;
2084 hb_codepoint_t glyph_min = (*it).first;
2085 glyph_max = glyph_min;
2087 unsigned num_glyphs = 0;
2088 unsigned num_ranges = 1;
2089 hb_codepoint_t prev_gid = glyph_min;
2090 unsigned prev_klass = (*it).second;
2092 for (const auto gid_klass_pair : it)
2094 hb_codepoint_t cur_gid = gid_klass_pair.first;
2095 unsigned cur_klass = gid_klass_pair.second;
2097 if (cur_gid == glyph_min) continue;
2098 if (cur_gid > glyph_max) glyph_max = cur_gid;
2099 if (cur_gid != prev_gid + 1 ||
2100 cur_klass != prev_klass)
2104 prev_klass = cur_klass;
2107 if (num_glyphs && 1 + (glyph_max - glyph_min + 1) <= num_ranges * 3)
2111 #ifndef HB_NO_BEYOND_64K
2112 if (glyph_max > 0xFFFFu)
2114 if (unlikely (glyph_max > 0xFFFFFFu))
2116 if (unlikely (glyph_max > 0xFFFFu))
2119 c->check_success (false, HB_SERIALIZE_ERROR_INT_OVERFLOW);
2120 return_trace (false);
2127 case 1: return_trace (u.format1.serialize (c, it));
2128 case 2: return_trace (u.format2.serialize (c, it));
2129 #ifndef HB_NO_BEYOND_64K
2130 case 3: return_trace (u.format3.serialize (c, it));
2131 case 4: return_trace (u.format4.serialize (c, it));
2133 default:return_trace (false);
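  /* Worked example (illustrative; the counts are hypothetical): the comparison above
   * weighs format 1 (one class value per glyph in [glyph_min, glyph_max]) against
   * format 2 (three 16-bit fields per range in the small-type variant).  Eight
   * glyphs spanning ids 1..8 but split into four class runs give 1 + 8 <= 4 * 3,
   * so format 1 wins; one hundred glyphs in five runs give 1 + 100 > 5 * 3, so the
   * encoder keeps format 2. */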
2137 bool subset (hb_subset_context_t *c,
2138 hb_map_t *klass_map = nullptr /*OUT*/,
2139 bool keep_empty_table = true,
2140 bool use_class_zero = true,
2141 const Coverage* glyph_filter = nullptr) const
2143 TRACE_SUBSET (this);
2145 case 1: return_trace (u.format1.subset (c, klass_map, keep_empty_table, use_class_zero, glyph_filter));
2146 case 2: return_trace (u.format2.subset (c, klass_map, keep_empty_table, use_class_zero, glyph_filter));
2147 #ifndef HB_NO_BEYOND_64K
2148 case 3: return_trace (u.format3.subset (c, klass_map, keep_empty_table, use_class_zero, glyph_filter));
2149 case 4: return_trace (u.format4.subset (c, klass_map, keep_empty_table, use_class_zero, glyph_filter));
2151 default:return_trace (false);
2155 bool sanitize (hb_sanitize_context_t *c) const
2157 TRACE_SANITIZE (this);
2158 if (!u.format.sanitize (c)) return_trace (false);
2160 case 1: return_trace (u.format1.sanitize (c));
2161 case 2: return_trace (u.format2.sanitize (c));
2162 #ifndef HB_NO_BEYOND_64K
2163 case 3: return_trace (u.format3.sanitize (c));
2164 case 4: return_trace (u.format4.sanitize (c));
2166 default:return_trace (true);
2170 unsigned cost () const
2173 case 1: return u.format1.cost ();
2174 case 2: return u.format2.cost ();
2175 #ifndef HB_NO_BEYOND_64K
2176 case 3: return u.format3.cost ();
2177 case 4: return u.format4.cost ();
2183 /* Might return false if array looks unsorted.
2184 * Used for faster rejection of corrupt data. */
2185 template <typename set_t>
2186 bool collect_coverage (set_t *glyphs) const
2189 case 1: return u.format1.collect_coverage (glyphs);
2190 case 2: return u.format2.collect_coverage (glyphs);
2191 #ifndef HB_NO_BEYOND_64K
2192 case 3: return u.format3.collect_coverage (glyphs);
2193 case 4: return u.format4.collect_coverage (glyphs);
2195 default:return false;
2199 /* Might return false if array looks unsorted.
2200 * Used for faster rejection of corrupt data. */
2201 template <typename set_t>
2202 bool collect_class (set_t *glyphs, unsigned int klass) const
2205 case 1: return u.format1.collect_class (glyphs, klass);
2206 case 2: return u.format2.collect_class (glyphs, klass);
2207 #ifndef HB_NO_BEYOND_64K
2208 case 3: return u.format3.collect_class (glyphs, klass);
2209 case 4: return u.format4.collect_class (glyphs, klass);
2211 default:return false;
2215 bool intersects (const hb_set_t *glyphs) const
2218 case 1: return u.format1.intersects (glyphs);
2219 case 2: return u.format2.intersects (glyphs);
2220 #ifndef HB_NO_BEYOND_64K
2221 case 3: return u.format3.intersects (glyphs);
2222 case 4: return u.format4.intersects (glyphs);
2224 default:return false;
2227 bool intersects_class (const hb_set_t *glyphs, unsigned int klass) const
2230 case 1: return u.format1.intersects_class (glyphs, klass);
2231 case 2: return u.format2.intersects_class (glyphs, klass);
2232 #ifndef HB_NO_BEYOND_64K
2233 case 3: return u.format3.intersects_class (glyphs, klass);
2234 case 4: return u.format4.intersects_class (glyphs, klass);
2236 default:return false;
2240 void intersected_class_glyphs (const hb_set_t *glyphs, unsigned klass, hb_set_t *intersect_glyphs) const
2243 case 1: return u.format1.intersected_class_glyphs (glyphs, klass, intersect_glyphs);
2244 case 2: return u.format2.intersected_class_glyphs (glyphs, klass, intersect_glyphs);
2245 #ifndef HB_NO_BEYOND_64K
2246 case 3: return u.format3.intersected_class_glyphs (glyphs, klass, intersect_glyphs);
2247 case 4: return u.format4.intersected_class_glyphs (glyphs, klass, intersect_glyphs);
2253 void intersected_classes (const hb_set_t *glyphs, hb_set_t *intersect_classes) const
2256 case 1: return u.format1.intersected_classes (glyphs, intersect_classes);
2257 case 2: return u.format2.intersected_classes (glyphs, intersect_classes);
2258 #ifndef HB_NO_BEYOND_64K
2259 case 3: return u.format3.intersected_classes (glyphs, intersect_classes);
2260 case 4: return u.format4.intersected_classes (glyphs, intersect_classes);
2269 HBUINT16 format; /* Format identifier */
2270 ClassDefFormat1_3<SmallTypes> format1;
2271 ClassDefFormat2_4<SmallTypes> format2;
2272 #ifndef HB_NO_BEYOND_64K
2273 ClassDefFormat1_3<MediumTypes>format3;
2274 ClassDefFormat2_4<MediumTypes>format4;
2278 DEFINE_SIZE_UNION (2, format);
2281 template<typename Iterator>
2282 static inline bool ClassDef_serialize (hb_serialize_context_t *c,
2284 { return (c->start_embed<ClassDef> ()->serialize (c, it)); }
2288 * Item Variation Store
2291 /* ported from fonttools (class _Encoding) */
2292 struct delta_row_encoding_t
2294 /* each byte represents a region; its value is one of 0/1/2/4, the number of
2295  * bytes needed to encode the deltas for that region */
2296 hb_vector_t<uint8_t> chars;
2298 hb_vector_t<uint8_t> columns;
2299 unsigned overhead = 0;
2300 hb_vector_t<const hb_vector_t<int>*> items;
2302 delta_row_encoding_t () = default;
2303 delta_row_encoding_t (hb_vector_t<uint8_t>&& chars_,
2304 const hb_vector_t<int>* row = nullptr) :
2305 delta_row_encoding_t ()
2308 chars = std::move (chars_);
2309 width = get_width ();
2310 columns = get_columns ();
2311 overhead = get_chars_overhead (columns);
2312 if (row) items.push (row);
2315 bool is_empty () const
2318 static hb_vector_t<uint8_t> get_row_chars (const hb_vector_t<int>& row)
2320 hb_vector_t<uint8_t> ret;
2321 if (!ret.alloc (row.length)) return ret;
2323 bool long_words = false;
2325 /* 0/1/2 byte encoding */
2326 for (int i = row.length - 1; i >= 0; i--)
2328 int v = row.arrayZ[i];
2331 else if (v > 32767 || v < -32768)
2336 else if (v > 127 || v < -128)
2345 /* redo, 0/2/4 byte encoding */
2347 for (int i = row.length - 1; i >= 0; i--)
2349 int v = row.arrayZ[i];
2352 else if (v > 32767 || v < -32768)
2360 inline unsigned get_width ()
2362 unsigned ret = + hb_iter (chars)
2363 | hb_reduce (hb_add, 0u)
2368 hb_vector_t<uint8_t> get_columns ()
2370 hb_vector_t<uint8_t> cols;
2371 cols.alloc (chars.length);
2372 for (auto v : chars)
2374 uint8_t flag = v ? 1 : 0;
2380 static inline unsigned get_chars_overhead (const hb_vector_t<uint8_t>& cols)
2382 unsigned c = 4 + 6; // 4 bytes for LOffset, 6 bytes for VarData header
2383 unsigned cols_bit_count = 0;
2385 if (v) cols_bit_count++;
2386 return c + cols_bit_count * 2;
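  /* Worked example (illustrative): with 10 regions of which 7 have a non-zero
   * column, the per-encoding overhead is 4 (LOffset in the VariationStore) +
   * 6 (VarData header) + 7 * 2 (one region index each) = 24 bytes. */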
2389 unsigned get_gain () const
2391 int count = items.length;
2392 return hb_max (0, (int) overhead - count);
2395 int gain_from_merging (const delta_row_encoding_t& other_encoding) const
2397 int combined_width = 0;
2398 for (unsigned i = 0; i < chars.length; i++)
2399 combined_width += hb_max (chars.arrayZ[i], other_encoding.chars.arrayZ[i]);
2401 hb_vector_t<uint8_t> combined_columns;
2402 combined_columns.alloc (columns.length);
2403 for (unsigned i = 0; i < columns.length; i++)
2404 combined_columns.push (columns.arrayZ[i] | other_encoding.columns.arrayZ[i]);
2406 int combined_overhead = get_chars_overhead (combined_columns);
2407 int combined_gain = (int) overhead + (int) other_encoding.overhead - combined_overhead
2408 - (combined_width - (int) width) * items.length
2409 - (combined_width - (int) other_encoding.width) * other_encoding.items.length;
2411 return combined_gain;
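  /* Worked example (illustrative; all byte counts are hypothetical): two encodings
   * with overhead 24 each, widths 6 and 8, and 3 and 5 rows, merging into a
   * combined width of 10 with combined overhead 24, give
   *   24 + 24 - 24 - (10 - 6) * 3 - (10 - 8) * 5 = 2,
   * i.e. merging the two encodings would save about 2 bytes. */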
2414 static int cmp (const void *pa, const void *pb)
2416 const delta_row_encoding_t *a = (const delta_row_encoding_t *)pa;
2417 const delta_row_encoding_t *b = (const delta_row_encoding_t *)pb;
2419 int gain_a = a->get_gain ();
2420 int gain_b = b->get_gain ();
2422 if (gain_a != gain_b)
2423 return gain_a - gain_b;
2425 return (b->chars).as_array ().cmp ((a->chars).as_array ());
2428 static int cmp_width (const void *pa, const void *pb)
2430 const delta_row_encoding_t *a = (const delta_row_encoding_t *)pa;
2431 const delta_row_encoding_t *b = (const delta_row_encoding_t *)pb;
2433 if (a->width != b->width)
2434 return (int) a->width - (int) b->width;
2436 return (b->chars).as_array ().cmp ((a->chars).as_array ());
2439 bool add_row (const hb_vector_t<int>* row)
2440 { return items.push (row); }
2443 struct VarRegionAxis
2445 float evaluate (int coord) const
2447 int peak = peakCoord.to_int ();
2448 if (peak == 0 || coord == peak)
2451 int start = startCoord.to_int (), end = endCoord.to_int ();
2453 /* TODO Move these to sanitize(). */
2454 if (unlikely (start > peak || peak > end))
2456 if (unlikely (start < 0 && end > 0 && peak != 0))
2459 if (coord <= start || end <= coord)
2464 return float (coord - start) / (peak - start);
2466 return float (end - coord) / (end - peak);
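  /* Worked example (illustrative): the region acts as a "tent" over the axis.
   * With start = 0, peak = 0.5 and end = 1.0 (F2DOT14: 0, 8192, 16384), a
   * normalized coordinate of 0.25 (4096) yields (4096 - 0) / (8192 - 0) = 0.5,
   * the peak itself yields 1.0, and anything at or outside [start, end] yields 0. */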
2469 bool sanitize (hb_sanitize_context_t *c) const
2471 TRACE_SANITIZE (this);
2472 return_trace (c->check_struct (this));
2473 /* TODO Handle invalid start/peak/end configs, so we don't
2474 * have to do that at runtime. */
2477 bool serialize (hb_serialize_context_t *c) const
2479 TRACE_SERIALIZE (this);
2480 return_trace (c->embed (this));
2488 DEFINE_SIZE_STATIC (6);
2491 #define REGION_CACHE_ITEM_CACHE_INVALID 2.f
2493 struct VarRegionList
2495 using cache_t = float;
2497 float evaluate (unsigned int region_index,
2498 const int *coords, unsigned int coord_len,
2499 cache_t *cache = nullptr) const
2501 if (unlikely (region_index >= regionCount))
2504 float *cached_value = nullptr;
2507 cached_value = &(cache[region_index]);
2508 if (likely (*cached_value != REGION_CACHE_ITEM_CACHE_INVALID))
2509 return *cached_value;
2512 const VarRegionAxis *axes = axesZ.arrayZ + (region_index * axisCount);
2515 unsigned int count = axisCount;
2516 for (unsigned int i = 0; i < count; i++)
2518 int coord = i < coord_len ? coords[i] : 0;
2519 float factor = axes[i].evaluate (coord);
2534 bool sanitize (hb_sanitize_context_t *c) const
2536 TRACE_SANITIZE (this);
2537 return_trace (c->check_struct (this) && axesZ.sanitize (c, axisCount * regionCount));
2540 bool serialize (hb_serialize_context_t *c,
2541 const hb_vector_t<hb_tag_t>& axis_tags,
2542 const hb_vector_t<const hb_hashmap_t<hb_tag_t, Triple>*>& regions)
2544 TRACE_SERIALIZE (this);
2545 unsigned axis_count = axis_tags.length;
2546 unsigned region_count = regions.length;
2547 if (!axis_count || !region_count) return_trace (false);
2548 if (unlikely (hb_unsigned_mul_overflows (axis_count * region_count,
2549 VarRegionAxis::static_size))) return_trace (false);
2550 if (unlikely (!c->extend_min (this))) return_trace (false);
2551 axisCount = axis_count;
2552 regionCount = region_count;
2554 for (unsigned r = 0; r < region_count; r++)
2556 const auto& region = regions[r];
2557 for (unsigned i = 0; i < axis_count; i++)
2559 hb_tag_t tag = axis_tags.arrayZ[i];
2560 VarRegionAxis var_region_rec;
2562 if (region->has (tag, &coords))
2564 var_region_rec.startCoord.set_float (coords->minimum);
2565 var_region_rec.peakCoord.set_float (coords->middle);
2566 var_region_rec.endCoord.set_float (coords->maximum);
2570 var_region_rec.startCoord.set_int (0);
2571 var_region_rec.peakCoord.set_int (0);
2572 var_region_rec.endCoord.set_int (0);
2574 if (!var_region_rec.serialize (c))
2575 return_trace (false);
2578 return_trace (true);
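  /* Note (illustrative): an axis the region does not constrain is written as
   * (start, peak, end) = (0, 0, 0); VarRegionAxis::evaluate () treats a zero peak
   * as "always applies" and contributes a factor of 1.0, so only the axes present
   * in `region` shape the scalar. */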
2581 bool serialize (hb_serialize_context_t *c, const VarRegionList *src, const hb_inc_bimap_t ®ion_map)
2583 TRACE_SERIALIZE (this);
2584 if (unlikely (!c->extend_min (this))) return_trace (false);
2585 axisCount = src->axisCount;
2586 regionCount = region_map.get_population ();
2587 if (unlikely (hb_unsigned_mul_overflows (axisCount * regionCount,
2588 VarRegionAxis::static_size))) return_trace (false);
2589 if (unlikely (!c->extend (this))) return_trace (false);
2590 unsigned int region_count = src->regionCount;
2591 for (unsigned int r = 0; r < regionCount; r++)
2593 unsigned int backward = region_map.backward (r);
2594 if (backward >= region_count) return_trace (false);
2595 hb_memcpy (&axesZ[axisCount * r], &src->axesZ[axisCount * backward], VarRegionAxis::static_size * axisCount);
2598 return_trace (true);
2601 bool get_var_region (unsigned region_index,
2602 const hb_map_t& axes_old_index_tag_map,
2603 hb_hashmap_t<hb_tag_t, Triple>& axis_tuples /* OUT */) const
2605 if (region_index >= regionCount) return false;
2606 const VarRegionAxis* axis_region = axesZ.arrayZ + (region_index * axisCount);
2607 for (unsigned i = 0; i < axisCount; i++)
2610 if (!axes_old_index_tag_map.has (i, &axis_tag))
2613 float min_val = axis_region->startCoord.to_float ();
2614 float def_val = axis_region->peakCoord.to_float ();
2615 float max_val = axis_region->endCoord.to_float ();
2618 axis_tuples.set (*axis_tag, Triple (min_val, def_val, max_val));
2621 return !axis_tuples.in_error ();
2624 bool get_var_regions (const hb_map_t& axes_old_index_tag_map,
2625 hb_vector_t<hb_hashmap_t<hb_tag_t, Triple>>& regions /* OUT */) const
2627 if (!regions.alloc (regionCount))
2630 for (unsigned i = 0; i < regionCount; i++)
2632 hb_hashmap_t<hb_tag_t, Triple> axis_tuples;
2633 if (!get_var_region (i, axes_old_index_tag_map, axis_tuples))
2635 regions.push (std::move (axis_tuples));
2637 return !regions.in_error ();
2640 unsigned int get_size () const { return min_size + VarRegionAxis::static_size * axisCount * regionCount; }
2644 HBUINT15 regionCount;
2646 UnsizedArrayOf<VarRegionAxis>
2649 DEFINE_SIZE_ARRAY (4, axesZ);
2654 unsigned int get_item_count () const
2655 { return itemCount; }
2657 unsigned int get_region_index_count () const
2658 { return regionIndices.len; }
2660 unsigned get_region_index (unsigned i) const
2661 { return i >= regionIndices.len ? -1 : regionIndices[i]; }
2663 unsigned int get_row_size () const
2664 { return (wordCount () + regionIndices.len) * (longWords () ? 2 : 1); }
2666 unsigned int get_size () const
2668 - regionIndices.min_size + regionIndices.get_size ()
2669 + itemCount * get_row_size ();
2672 float get_delta (unsigned int inner,
2673 const int *coords, unsigned int coord_count,
2674 const VarRegionList ®ions,
2675 VarRegionList::cache_t *cache = nullptr) const
2677 if (unlikely (inner >= itemCount))
2680 unsigned int count = regionIndices.len;
2681 bool is_long = longWords ();
2682 unsigned word_count = wordCount ();
2683 unsigned int scount = is_long ? count : word_count;
2684 unsigned int lcount = is_long ? word_count : 0;
2686 const HBUINT8 *bytes = get_delta_bytes ();
2687 const HBUINT8 *row = bytes + inner * get_row_size ();
2692 const HBINT32 *lcursor = reinterpret_cast<const HBINT32 *> (row);
2693 for (; i < lcount; i++)
2695 float scalar = regions.evaluate (regionIndices.arrayZ[i], coords, coord_count, cache);
2696 delta += scalar * *lcursor++;
2698 const HBINT16 *scursor = reinterpret_cast<const HBINT16 *> (lcursor);
2699 for (; i < scount; i++)
2701 float scalar = regions.evaluate (regionIndices.arrayZ[i], coords, coord_count, cache);
2702 delta += scalar * *scursor++;
2704 const HBINT8 *bcursor = reinterpret_cast<const HBINT8 *> (scursor);
2705 for (; i < count; i++)
2707 float scalar = regions.evaluate (regionIndices.arrayZ[i], coords, coord_count, cache);
2708 delta += scalar * *bcursor++;
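  /* Worked example (illustrative): with longWords () == false, wordCount () == 2
   * and regionIndices.len == 5, each row is laid out as two 16-bit deltas followed
   * by three 8-bit deltas (get_row_size () == (2 + 5) * 1 == 7 bytes), and the
   * returned value is the sum of scalar[i] * delta[i] over the five regions. */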
2714 void get_region_scalars (const int *coords, unsigned int coord_count,
2715 const VarRegionList ®ions,
2716 float *scalars /* OUT */,
2717 unsigned int num_scalars) const
2719 unsigned count = hb_min (num_scalars, regionIndices.len);
2720 for (unsigned int i = 0; i < count; i++)
2721 scalars[i] = regions.evaluate (regionIndices.arrayZ[i], coords, coord_count);
2722 for (unsigned int i = count; i < num_scalars; i++)
2726 bool sanitize (hb_sanitize_context_t *c) const
2728 TRACE_SANITIZE (this);
2729 return_trace (c->check_struct (this) &&
2730 regionIndices.sanitize (c) &&
2731 wordCount () <= regionIndices.len &&
2732 c->check_range (get_delta_bytes (),
2737 bool serialize (hb_serialize_context_t *c,
2739 const hb_vector_t<const hb_vector_t<int>*>& rows)
2741 TRACE_SERIALIZE (this);
2742 if (unlikely (!c->extend_min (this))) return_trace (false);
2743 unsigned row_count = rows.length;
2744 itemCount = row_count;
2746 int min_threshold = has_long ? -65536 : -128;
2747 int max_threshold = has_long ? +65535 : +127;
2748 enum delta_size_t { kZero=0, kNonWord, kWord };
2749 hb_vector_t<delta_size_t> delta_sz;
2750 unsigned num_regions = rows[0]->length;
2751 if (!delta_sz.resize (num_regions))
2752 return_trace (false);
2754 unsigned word_count = 0;
2755 for (unsigned r = 0; r < num_regions; r++)
2757 for (unsigned i = 0; i < row_count; i++)
2759 int delta = rows[i]->arrayZ[r];
2760 if (delta < min_threshold || delta > max_threshold)
2762 delta_sz[r] = kWord;
2766 else if (delta != 0)
2768 delta_sz[r] = kNonWord;
2773 /* reorder regions: words and then non-words */
2774 unsigned word_index = 0;
2775 unsigned non_word_index = word_count;
2777 for (unsigned r = 0; r < num_regions; r++)
2779 if (!delta_sz[r]) continue;
2780 unsigned new_r = (delta_sz[r] == kWord)? word_index++ : non_word_index++;
2781 if (!ri_map.set (new_r, r))
2782 return_trace (false);
2785 wordSizeCount = word_count | (has_long ? 0x8000u /* LONG_WORDS */ : 0);
2787 unsigned ri_count = ri_map.get_population ();
2788 regionIndices.len = ri_count;
2789 if (unlikely (!c->extend (this))) return_trace (false);
2791 for (unsigned r = 0; r < ri_count; r++)
2793 hb_codepoint_t *idx;
2794 if (!ri_map.has (r, &idx))
2795 return_trace (false);
2796 regionIndices[r] = *idx;
2799 HBUINT8 *delta_bytes = get_delta_bytes ();
2800 unsigned row_size = get_row_size ();
2801 for (unsigned int i = 0; i < row_count; i++)
2803 for (unsigned int r = 0; r < ri_count; r++)
2805 int delta = rows[i]->arrayZ[ri_map[r]];
2806 set_item_delta_fast (i, r, delta, delta_bytes, row_size);
2809 return_trace (true);
2812 bool serialize (hb_serialize_context_t *c,
2814 const hb_inc_bimap_t &inner_map,
2815 const hb_inc_bimap_t ®ion_map)
2817 TRACE_SERIALIZE (this);
2818 if (unlikely (!c->extend_min (this))) return_trace (false);
2819 itemCount = inner_map.get_next_value ();
2821 /* Optimize word count */
2822 unsigned ri_count = src->regionIndices.len;
2823 enum delta_size_t { kZero=0, kNonWord, kWord };
2824 hb_vector_t<delta_size_t> delta_sz;
2825 hb_vector_t<unsigned int> ri_map; /* maps new index to old index */
2826 delta_sz.resize (ri_count);
2827 ri_map.resize (ri_count);
2828 unsigned int new_word_count = 0;
2831 const HBUINT8 *src_delta_bytes = src->get_delta_bytes ();
2832 unsigned src_row_size = src->get_row_size ();
2833 unsigned src_word_count = src->wordCount ();
2834 bool src_long_words = src->longWords ();
2836 bool has_long = false;
2839 for (r = 0; r < src_word_count; r++)
2841 for (unsigned old_gid : inner_map.keys())
2843 int32_t delta = src->get_item_delta_fast (old_gid, r, src_delta_bytes, src_row_size);
2844 if (delta < -65536 || 65535 < delta)
2853 signed min_threshold = has_long ? -65536 : -128;
2854 signed max_threshold = has_long ? +65535 : +127;
2855 for (r = 0; r < ri_count; r++)
2857 bool short_circuit = src_long_words == has_long && src_word_count <= r;
2859 delta_sz[r] = kZero;
2860 for (unsigned old_gid : inner_map.keys())
2862 int32_t delta = src->get_item_delta_fast (old_gid, r, src_delta_bytes, src_row_size);
2863 if (delta < min_threshold || max_threshold < delta)
2865 delta_sz[r] = kWord;
2869 else if (delta != 0)
2871 delta_sz[r] = kNonWord;
2878 unsigned int word_index = 0;
2879 unsigned int non_word_index = new_word_count;
2880 unsigned int new_ri_count = 0;
2881 for (r = 0; r < ri_count; r++)
2884 unsigned new_r = (delta_sz[r] == kWord)? word_index++ : non_word_index++;
2889 wordSizeCount = new_word_count | (has_long ? 0x8000u /* LONG_WORDS */ : 0);
2891 regionIndices.len = new_ri_count;
2893 if (unlikely (!c->extend (this))) return_trace (false);
2895 for (r = 0; r < new_ri_count; r++)
2896 regionIndices[r] = region_map[src->regionIndices[ri_map[r]]];
2898 HBUINT8 *delta_bytes = get_delta_bytes ();
2899 unsigned row_size = get_row_size ();
2900 unsigned count = itemCount;
2901 for (unsigned int i = 0; i < count; i++)
2903 unsigned int old = inner_map.backward (i);
2904 for (unsigned int r = 0; r < new_ri_count; r++)
2905 set_item_delta_fast (i, r,
2906 src->get_item_delta_fast (old, ri_map[r],
2907 src_delta_bytes, src_row_size),
2908 delta_bytes, row_size);
2911 return_trace (true);
2914 void collect_region_refs (hb_set_t ®ion_indices, const hb_inc_bimap_t &inner_map) const
2916 const HBUINT8 *delta_bytes = get_delta_bytes ();
2917 unsigned row_size = get_row_size ();
2919 for (unsigned int r = 0; r < regionIndices.len; r++)
2921 unsigned int region = regionIndices.arrayZ[r];
2922 if (region_indices.has (region)) continue;
2923 for (hb_codepoint_t old_gid : inner_map.keys())
2924 if (get_item_delta_fast (old_gid, r, delta_bytes, row_size) != 0)
2926 region_indices.add (region);
2933 const HBUINT8 *get_delta_bytes () const
2934 { return &StructAfter<HBUINT8> (regionIndices); }
2937 HBUINT8 *get_delta_bytes ()
2938 { return &StructAfter<HBUINT8> (regionIndices); }
2941 int32_t get_item_delta_fast (unsigned int item, unsigned int region,
2942 const HBUINT8 *delta_bytes, unsigned row_size) const
2944 if (unlikely (item >= itemCount || region >= regionIndices.len)) return 0;
2946 const HBINT8 *p = (const HBINT8 *) delta_bytes + item * row_size;
2947 unsigned word_count = wordCount ();
2948 bool is_long = longWords ();
2951 if (region < word_count)
2952 return ((const HBINT32 *) p)[region];
2954 return ((const HBINT16 *)(p + HBINT32::static_size * word_count))[region - word_count];
2958 if (region < word_count)
2959 return ((const HBINT16 *) p)[region];
2961 return (p + HBINT16::static_size * word_count)[region - word_count];
2964 int32_t get_item_delta (unsigned int item, unsigned int region) const
2966 return get_item_delta_fast (item, region,
2972 void set_item_delta_fast (unsigned int item, unsigned int region, int32_t delta,
2973 HBUINT8 *delta_bytes, unsigned row_size)
2975 HBINT8 *p = (HBINT8 *) delta_bytes + item * row_size;
2976 unsigned word_count = wordCount ();
2977 bool is_long = longWords ();
2980 if (region < word_count)
2981 ((HBINT32 *) p)[region] = delta;
2983 ((HBINT16 *)(p + HBINT32::static_size * word_count))[region - word_count] = delta;
2987 if (region < word_count)
2988 ((HBINT16 *) p)[region] = delta;
2990 (p + HBINT16::static_size * word_count)[region - word_count] = delta;
2993 void set_item_delta (unsigned int item, unsigned int region, int32_t delta)
2995 set_item_delta_fast (item, region, delta,
3000 bool longWords () const { return wordSizeCount & 0x8000u /* LONG_WORDS */; }
3001 unsigned wordCount () const { return wordSizeCount & 0x7FFFu /* WORD_DELTA_COUNT_MASK */; }
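  /* Worked example (illustrative): wordSizeCount == 0x0003 means three "word"
   * deltas (16-bit) followed by 8-bit deltas in each row; 0x8003 sets the
   * LONG_WORDS flag, so the same three deltas become 32-bit and the rest 16-bit. */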
3005 HBUINT16 wordSizeCount;
3006 Array16Of<HBUINT16> regionIndices;
3007 /*UnsizedArrayOf<HBUINT8>bytesX;*/
3009 DEFINE_SIZE_ARRAY (6, regionIndices);
3012 struct VariationStore
3014 friend struct item_variations_t;
3015 using cache_t = VarRegionList::cache_t;
3017 cache_t *create_cache () const
3022 auto &r = this+regions;
3023 unsigned count = r.regionCount;
3025 float *cache = (float *) hb_malloc (sizeof (float) * count);
3026 if (unlikely (!cache)) return nullptr;
3028 for (unsigned i = 0; i < count; i++)
3029 cache[i] = REGION_CACHE_ITEM_CACHE_INVALID;
3034 static void destroy_cache (cache_t *cache) { hb_free (cache); }
3037 float get_delta (unsigned int outer, unsigned int inner,
3038 const int *coords, unsigned int coord_count,
3039 VarRegionList::cache_t *cache = nullptr) const
3045 if (unlikely (outer >= dataSets.len))
3048 return (this+dataSets[outer]).get_delta (inner,
3049 coords, coord_count,
3055 float get_delta (unsigned int index,
3056 const int *coords, unsigned int coord_count,
3057 VarRegionList::cache_t *cache = nullptr) const
3059 unsigned int outer = index >> 16;
3060 unsigned int inner = index & 0xFFFF;
3061 return get_delta (outer, inner, coords, coord_count, cache);
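  /* Illustrative sketch (hypothetical values; `store`, `coords` and `coord_count`
   * are assumed to come from the caller): a packed variation index keeps the
   * VarData subtable in its high 16 bits and the row in its low 16 bits:
   *
   *   unsigned var_idx = 0x00030007u;
   *   float d = store.get_delta (var_idx >> 16,      // outer: dataSets[3]
   *                              var_idx & 0xFFFFu,  // inner: row 7
   *                              coords, coord_count);
   */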
3063 float get_delta (unsigned int index,
3064 hb_array_t<int> coords,
3065 VarRegionList::cache_t *cache = nullptr) const
3067 return get_delta (index,
3068 coords.arrayZ, coords.length,
3072 bool sanitize (hb_sanitize_context_t *c) const
3078 TRACE_SANITIZE (this);
3079 return_trace (c->check_struct (this) &&
3081 regions.sanitize (c, this) &&
3082 dataSets.sanitize (c, this));
3085 bool serialize (hb_serialize_context_t *c,
3087 const hb_vector_t<hb_tag_t>& axis_tags,
3088 const hb_vector_t<const hb_hashmap_t<hb_tag_t, Triple>*>& region_list,
3089 const hb_vector_t<delta_row_encoding_t>& vardata_encodings)
3091 TRACE_SERIALIZE (this);
3093 return_trace (false);
3095 if (unlikely (!c->extend_min (this))) return_trace (false);
3098 if (!regions.serialize_serialize (c, axis_tags, region_list))
3099 return_trace (false);
3101 unsigned num_var_data = vardata_encodings.length;
3102 if (!num_var_data) return_trace (false);
3103 if (unlikely (!c->check_assign (dataSets.len, num_var_data,
3104 HB_SERIALIZE_ERROR_INT_OVERFLOW)))
3105 return_trace (false);
3107 if (unlikely (!c->extend (dataSets))) return_trace (false);
3108 for (unsigned i = 0; i < num_var_data; i++)
3109 if (!dataSets[i].serialize_serialize (c, has_long, vardata_encodings[i].items))
3110 return_trace (false);
3112 return_trace (true);
3115 bool serialize (hb_serialize_context_t *c,
3116 const VariationStore *src,
3117 const hb_array_t <const hb_inc_bimap_t> &inner_maps)
3119 TRACE_SERIALIZE (this);
3121 return_trace (false);
3124 if (unlikely (!c->extend_min (this))) return_trace (false);
3126 unsigned int set_count = 0;
3127 for (unsigned int i = 0; i < inner_maps.length; i++)
3128 if (inner_maps[i].get_population ())
3133 const auto &src_regions = src+src->regions;
3135 hb_set_t region_indices;
3136 for (unsigned int i = 0; i < inner_maps.length; i++)
3137 (src+src->dataSets[i]).collect_region_refs (region_indices, inner_maps[i]);
3139 if (region_indices.in_error ())
3140 return_trace (false);
3142 region_indices.del_range ((src_regions).regionCount, hb_set_t::INVALID);
3144 /* TODO use constructor when our data-structures support that. */
3145 hb_inc_bimap_t region_map;
3146 + hb_iter (region_indices)
3147 | hb_apply ([®ion_map] (unsigned _) { region_map.add(_); })
3149 if (region_map.in_error())
3150 return_trace (false);
3152 if (unlikely (!regions.serialize_serialize (c, &src_regions, region_map)))
3153 return_trace (false);
3155 dataSets.len = set_count;
3156 if (unlikely (!c->extend (dataSets))) return_trace (false);
3158 /* TODO: The following code could be simplified when
3159 * List16OfOffset16To::subset () can take a custom param to be passed to VarData::serialize () */
3160 unsigned int set_index = 0;
3161 for (unsigned int i = 0; i < inner_maps.length; i++)
3163 if (!inner_maps[i].get_population ()) continue;
3164 if (unlikely (!dataSets[set_index++]
3165 .serialize_serialize (c, &(src+src->dataSets[i]), inner_maps[i], region_map)))
3166 return_trace (false);
3169 return_trace (true);
3172 VariationStore *copy (hb_serialize_context_t *c) const
3174 TRACE_SERIALIZE (this);
3175 auto *out = c->start_embed (this);
3176 if (unlikely (!out)) return_trace (nullptr);
3178 hb_vector_t <hb_inc_bimap_t> inner_maps;
3179 unsigned count = dataSets.len;
3180 for (unsigned i = 0; i < count; i++)
3182 hb_inc_bimap_t *map = inner_maps.push ();
3183 auto &data = this+dataSets[i];
3185 unsigned itemCount = data.get_item_count ();
3186 for (unsigned j = 0; j < itemCount; j++)
3190 if (unlikely (!out->serialize (c, this, inner_maps))) return_trace (nullptr);
3195 bool subset (hb_subset_context_t *c, const hb_array_t<const hb_inc_bimap_t> &inner_maps) const
3197 TRACE_SUBSET (this);
3199 return_trace (false);
3202 VariationStore *varstore_prime = c->serializer->start_embed<VariationStore> ();
3203 if (unlikely (!varstore_prime)) return_trace (false);
3205 varstore_prime->serialize (c->serializer, this, inner_maps);
3208 !c->serializer->in_error()
3209 && varstore_prime->dataSets);
3212 unsigned int get_region_index_count (unsigned int major) const
3217 return (this+dataSets[major]).get_region_index_count ();
3220 void get_region_scalars (unsigned int major,
3221 const int *coords, unsigned int coord_count,
3222 float *scalars /*OUT*/,
3223 unsigned int num_scalars) const
3226 for (unsigned i = 0; i < num_scalars; i++)
3231 (this+dataSets[major]).get_region_scalars (coords, coord_count,
3233 &scalars[0], num_scalars);
3236 unsigned int get_sub_table_count () const
3241 return dataSets.len;
3244 const VarData& get_sub_table (unsigned i) const
3247 return Null (VarData);
3249 return this+dataSets[i];
3252 const VarRegionList& get_region_list () const
3255 return Null (VarRegionList);
3257 return this+regions;
3262 Offset32To<VarRegionList> regions;
3263 Array16OfOffset32To<VarData> dataSets;
3265 DEFINE_SIZE_ARRAY_SIZED (8, dataSets);
3268 #undef REGION_CACHE_ITEM_CACHE_INVALID
3271 * Feature Variations
3273 enum Cond_with_Var_flag_t
3275 KEEP_COND_WITH_VAR = 0,
3276 KEEP_RECORD_WITH_VAR = 1,
3277 DROP_COND_WITH_VAR = 2,
3278 DROP_RECORD_WITH_VAR = 3,
3281 struct ConditionFormat1
3283 friend struct Condition;
3285 bool subset (hb_subset_context_t *c) const
3287 TRACE_SUBSET (this);
3288 auto *out = c->serializer->embed (this);
3289 if (unlikely (!out)) return_trace (false);
3291 const hb_map_t *index_map = &c->plan->axes_index_map;
3292 if (index_map->is_empty ()) return_trace (true);
3294 const hb_map_t& axes_old_index_tag_map = c->plan->axes_old_index_tag_map;
3295 hb_codepoint_t *axis_tag;
3296 if (!axes_old_index_tag_map.has (axisIndex, &axis_tag) ||
3297 !index_map->has (axisIndex))
3298 return_trace (false);
3300 const hb_hashmap_t<hb_tag_t, Triple>& normalized_axes_location = c->plan->axes_location;
3301 Triple axis_limit{-1.f, 0.f, 1.f};
3302 Triple *normalized_limit;
3303 if (normalized_axes_location.has (*axis_tag, &normalized_limit))
3304 axis_limit = *normalized_limit;
3306 const hb_hashmap_t<hb_tag_t, TripleDistances>& axes_triple_distances = c->plan->axes_triple_distances;
3307 TripleDistances axis_triple_distances{1.f, 1.f};
3308 TripleDistances *triple_dists;
3309 if (axes_triple_distances.has (*axis_tag, &triple_dists))
3310 axis_triple_distances = *triple_dists;
3312 float normalized_min = renormalizeValue (filterRangeMinValue.to_float (), axis_limit, axis_triple_distances, false);
3313 float normalized_max = renormalizeValue (filterRangeMaxValue.to_float (), axis_limit, axis_triple_distances, false);
3314 out->filterRangeMinValue.set_float (normalized_min);
3315 out->filterRangeMaxValue.set_float (normalized_max);
3317 return_trace (c->serializer->check_assign (out->axisIndex, index_map->get (axisIndex),
3318 HB_SERIALIZE_ERROR_INT_OVERFLOW));
3322 Cond_with_Var_flag_t keep_with_variations (hb_collect_feature_substitutes_with_var_context_t *c,
3323 hb_map_t *condition_map /* OUT */) const
3325 // invalid axis index, drop the entire record
3326 if (!c->axes_index_tag_map->has (axisIndex))
3327 return DROP_RECORD_WITH_VAR;
3329 hb_tag_t axis_tag = c->axes_index_tag_map->get (axisIndex);
3331 Triple axis_range (-1.f, 0.f, 1.f);
3333 if (c->axes_location->has (axis_tag, &axis_limit))
3334 axis_range = *axis_limit;
3336 float axis_min_val = axis_range.minimum;
3337 float axis_default_val = axis_range.middle;
3338 float axis_max_val = axis_range.maximum;
3340 float filter_min_val = filterRangeMinValue.to_float ();
3341 float filter_max_val = filterRangeMaxValue.to_float ();
3343 if (axis_default_val < filter_min_val ||
3344 axis_default_val > filter_max_val)
3347 // condition not met, drop the entire record
3348 if (axis_min_val > filter_max_val || axis_max_val < filter_min_val ||
3349 filter_min_val > filter_max_val)
3350 return DROP_RECORD_WITH_VAR;
3352 // condition met and axis pinned, drop the condition
3353 if (c->axes_location->has (axis_tag) &&
3354 c->axes_location->get (axis_tag).is_point ())
3355 return DROP_COND_WITH_VAR;
3357 if (filter_max_val != axis_max_val || filter_min_val != axis_min_val)
3359 // add axisIndex->value into the hashmap so we can check if the record is
3360 // unique with variations
3361 int16_t int_filter_max_val = filterRangeMaxValue.to_int ();
3362 int16_t int_filter_min_val = filterRangeMinValue.to_int ();
3363 hb_codepoint_t val = (int_filter_max_val << 16) + int_filter_min_val;
3365 condition_map->set (axisIndex, val);
3366 return KEEP_COND_WITH_VAR;
3369 return KEEP_RECORD_WITH_VAR;
3372 bool evaluate (const int *coords, unsigned int coord_len) const
3374 int coord = axisIndex < coord_len ? coords[axisIndex] : 0;
3375 return filterRangeMinValue.to_int () <= coord && coord <= filterRangeMaxValue.to_int ();
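  /* Worked example (illustrative): with axisIndex == 0 and a filter range of
   * [0.5, 1.0] (F2DOT14: 8192..16384), a normalized coordinate of 10000 satisfies
   * the condition; a missing coordinate defaults to 0 and fails it. */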
3378 bool sanitize (hb_sanitize_context_t *c) const
3380 TRACE_SANITIZE (this);
3381 return_trace (c->check_struct (this));
3385 HBUINT16 format; /* Format identifier--format = 1 */
3387 F2DOT14 filterRangeMinValue;
3388 F2DOT14 filterRangeMaxValue;
3390 DEFINE_SIZE_STATIC (8);
3395 bool evaluate (const int *coords, unsigned int coord_len) const
3398 case 1: return u.format1.evaluate (coords, coord_len);
3399 default:return false;
3403 Cond_with_Var_flag_t keep_with_variations (hb_collect_feature_substitutes_with_var_context_t *c,
3404 hb_map_t *condition_map /* OUT */) const
3407 case 1: return u.format1.keep_with_variations (c, condition_map);
3408 default: c->apply = false; return KEEP_COND_WITH_VAR;
3412 template <typename context_t, typename ...Ts>
3413 typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
3415 if (unlikely (!c->may_dispatch (this, &u.format))) return c->no_dispatch_return_value ();
3416 TRACE_DISPATCH (this, u.format);
3418 case 1: return_trace (c->dispatch (u.format1, std::forward<Ts> (ds)...));
3419 default:return_trace (c->default_return_value ());
3423 bool sanitize (hb_sanitize_context_t *c) const
3425 TRACE_SANITIZE (this);
3426 if (!u.format.sanitize (c)) return_trace (false);
3428 case 1: return_trace (u.format1.sanitize (c));
3429 default:return_trace (true);
3435 HBUINT16 format; /* Format identifier */
3436 ConditionFormat1 format1;
3439 DEFINE_SIZE_UNION (2, format);
3444 bool evaluate (const int *coords, unsigned int coord_len) const
3446 unsigned int count = conditions.len;
3447 for (unsigned int i = 0; i < count; i++)
3448 if (!(this+conditions.arrayZ[i]).evaluate (coords, coord_len))
3453 void keep_with_variations (hb_collect_feature_substitutes_with_var_context_t *c) const
3455 hb_map_t *condition_map = hb_map_create ();
3456 if (unlikely (!condition_map)) return;
3457 hb::shared_ptr<hb_map_t> p {condition_map};
3459 hb_set_t *cond_set = hb_set_create ();
3460 if (unlikely (!cond_set)) return;
3461 hb::shared_ptr<hb_set_t> s {cond_set};
3464 bool should_keep = false;
3465 unsigned num_kept_cond = 0, cond_idx = 0;
3466 for (const auto& offset : conditions)
3468 Cond_with_Var_flag_t ret = (this+offset).keep_with_variations (c, condition_map);
3469 // condition is not met or condition out of range, drop the entire record
3470 if (ret == DROP_RECORD_WITH_VAR)
3473 if (ret == KEEP_COND_WITH_VAR)
3476 cond_set->add (cond_idx);
3480 if (ret == KEEP_RECORD_WITH_VAR)
3486 if (!should_keep) return;
3488 // check if condition_set is unique with variations
3489 if (c->conditionset_map->has (p))
3490 // duplicate found, drop the entire record
3493 c->conditionset_map->set (p, 1);
3494 c->record_cond_idx_map->set (c->cur_record_idx, s);
3495 if (should_keep && num_kept_cond == 0)
3496 c->universal = true;
3499 bool subset (hb_subset_context_t *c,
3500 hb_subset_layout_context_t *l) const
3502 TRACE_SUBSET (this);
3503 auto *out = c->serializer->start_embed (this);
3504 if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);
3506 hb_set_t *retained_cond_set = nullptr;
3507 if (l->feature_record_cond_idx_map != nullptr)
3508 retained_cond_set = l->feature_record_cond_idx_map->get (l->cur_feature_var_record_idx);
3510 unsigned int count = conditions.len;
3511 for (unsigned int i = 0; i < count; i++)
3513 if (retained_cond_set != nullptr && !retained_cond_set->has (i))
3515 subset_offset_array (c, out->conditions, this) (conditions[i]);
3518 return_trace (bool (out->conditions));
3521 bool sanitize (hb_sanitize_context_t *c) const
3523 TRACE_SANITIZE (this);
3524 return_trace (conditions.sanitize (c, this));
3528 Array16OfOffset32To<Condition> conditions;
3530 DEFINE_SIZE_ARRAY (2, conditions);
3533 struct FeatureTableSubstitutionRecord
3535 friend struct FeatureTableSubstitution;
3537 void collect_lookups (const void *base, hb_set_t *lookup_indexes /* OUT */) const
3539 return (base+feature).add_lookup_indexes_to (lookup_indexes);
3542 void closure_features (const void *base,
3543 const hb_map_t *lookup_indexes,
3544 hb_set_t *feature_indexes /* OUT */) const
3546 if ((base+feature).intersects_lookup_indexes (lookup_indexes))
3547 feature_indexes->add (featureIndex);
3550 void collect_feature_substitutes_with_variations (hb_hashmap_t<unsigned, const Feature*> *feature_substitutes_map,
3551 const hb_set_t *feature_indices,
3552 const void *base) const
3554 if (feature_indices->has (featureIndex))
3555 feature_substitutes_map->set (featureIndex, &(base+feature));
3558 bool subset (hb_subset_layout_context_t *c, const void *base) const
3560 TRACE_SUBSET (this);
3561 if (!c->feature_index_map->has (featureIndex) ||
3562 c->feature_substitutes_map->has (featureIndex)) {
3563 // Feature that is being substituted is not being retained, so we don't
3564 // need to keep this substitution record.
3565 return_trace (false);
3568 auto *out = c->subset_context->serializer->embed (this);
3569 if (unlikely (!out)) return_trace (false);
3571 out->featureIndex = c->feature_index_map->get (featureIndex);
3572 return_trace (out->feature.serialize_subset (c->subset_context, feature, base, c));
3575 bool sanitize (hb_sanitize_context_t *c, const void *base) const
3577 TRACE_SANITIZE (this);
3578 return_trace (c->check_struct (this) && feature.sanitize (c, base));
3582 HBUINT16 featureIndex;
3583 Offset32To<Feature> feature;
3585 DEFINE_SIZE_STATIC (6);
3588 struct FeatureTableSubstitution
3590 const Feature *find_substitute (unsigned int feature_index) const
3592 unsigned int count = substitutions.len;
3593 for (unsigned int i = 0; i < count; i++)
3595 const FeatureTableSubstitutionRecord &record = substitutions.arrayZ[i];
3596 if (record.featureIndex == feature_index)
3597 return &(this+record.feature);
3602 void collect_lookups (const hb_set_t *feature_indexes,
3603 const hb_hashmap_t<unsigned, const Feature*> *feature_substitutes_map,
3604 hb_set_t *lookup_indexes /* OUT */) const
3606 + hb_iter (substitutions)
3607 | hb_filter (feature_indexes, &FeatureTableSubstitutionRecord::featureIndex)
3608 | hb_filter ([feature_substitutes_map] (const FeatureTableSubstitutionRecord& record)
3610 if (feature_substitutes_map == nullptr) return true;
3611 return !feature_substitutes_map->has (record.featureIndex);
3613 | hb_apply ([this, lookup_indexes] (const FeatureTableSubstitutionRecord& r)
3614 { r.collect_lookups (this, lookup_indexes); })
3618 void closure_features (const hb_map_t *lookup_indexes,
3619 hb_set_t *feature_indexes /* OUT */) const
3621 for (const FeatureTableSubstitutionRecord& record : substitutions)
3622 record.closure_features (this, lookup_indexes, feature_indexes);
3625 bool intersects_features (const hb_map_t *feature_index_map) const
3627 for (const FeatureTableSubstitutionRecord& record : substitutions)
3629 if (feature_index_map->has (record.featureIndex)) return true;
3634 void collect_feature_substitutes_with_variations (hb_collect_feature_substitutes_with_var_context_t *c) const
3636 for (const FeatureTableSubstitutionRecord& record : substitutions)
3637 record.collect_feature_substitutes_with_variations (c->feature_substitutes_map, c->feature_indices, this);
3640 bool subset (hb_subset_context_t *c,
3641 hb_subset_layout_context_t *l) const
3643 TRACE_SUBSET (this);
3644 auto *out = c->serializer->start_embed (*this);
3645 if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);
3647 out->version.major = version.major;
3648 out->version.minor = version.minor;
3650 + substitutions.iter ()
3651 | hb_apply (subset_record_array (l, &(out->substitutions), this))
3654 return_trace (bool (out->substitutions));
3657 bool sanitize (hb_sanitize_context_t *c) const
3659 TRACE_SANITIZE (this);
3660 return_trace (version.sanitize (c) &&
3661 likely (version.major == 1) &&
3662 substitutions.sanitize (c, this));
3666 FixedVersion<> version; /* Version--0x00010000u */
3667 Array16Of<FeatureTableSubstitutionRecord>
3670 DEFINE_SIZE_ARRAY (6, substitutions);
3673 struct FeatureVariationRecord
3675 friend struct FeatureVariations;
3677 void collect_lookups (const void *base,
3678 const hb_set_t *feature_indexes,
3679 const hb_hashmap_t<unsigned, const Feature*> *feature_substitutes_map,
3680 hb_set_t *lookup_indexes /* OUT */) const
3682 return (base+substitutions).collect_lookups (feature_indexes, feature_substitutes_map, lookup_indexes);
3685 void closure_features (const void *base,
3686 const hb_map_t *lookup_indexes,
3687 hb_set_t *feature_indexes /* OUT */) const
3689 (base+substitutions).closure_features (lookup_indexes, feature_indexes);
3692 bool intersects_features (const void *base, const hb_map_t *feature_index_map) const
3694 return (base+substitutions).intersects_features (feature_index_map);
3697 void collect_feature_substitutes_with_variations (hb_collect_feature_substitutes_with_var_context_t *c,
3698 const void *base) const
3700 (base+conditions).keep_with_variations (c);
3701 if (c->apply && !c->variation_applied)
3703 (base+substitutions).collect_feature_substitutes_with_variations (c);
3704 c->variation_applied = true; // set variations only once
3708 bool subset (hb_subset_layout_context_t *c, const void *base) const
3710 TRACE_SUBSET (this);
3711 auto *out = c->subset_context->serializer->embed (this);
3712 if (unlikely (!out)) return_trace (false);
3714 out->conditions.serialize_subset (c->subset_context, conditions, base, c);
3715 out->substitutions.serialize_subset (c->subset_context, substitutions, base, c);
3717 return_trace (true);
3720 bool sanitize (hb_sanitize_context_t *c, const void *base) const
3722 TRACE_SANITIZE (this);
3723 return_trace (conditions.sanitize (c, base) &&
3724 substitutions.sanitize (c, base));
3728 Offset32To<ConditionSet>
3730 Offset32To<FeatureTableSubstitution>
3733 DEFINE_SIZE_STATIC (8);
3736 struct FeatureVariations
3738 static constexpr unsigned NOT_FOUND_INDEX = 0xFFFFFFFFu;
3740 bool find_index (const int *coords, unsigned int coord_len,
3741 unsigned int *index) const
3743 unsigned int count = varRecords.len;
3744 for (unsigned int i = 0; i < count; i++)
3746 const FeatureVariationRecord &record = varRecords.arrayZ[i];
3747 if ((this+record.conditions).evaluate (coords, coord_len))
3753 *index = NOT_FOUND_INDEX;
3757 const Feature *find_substitute (unsigned int variations_index,
3758 unsigned int feature_index) const
3760 const FeatureVariationRecord &record = varRecords[variations_index];
3761 return (this+record.substitutions).find_substitute (feature_index);
3764 void collect_feature_substitutes_with_variations (hb_collect_feature_substitutes_with_var_context_t *c) const
3766 unsigned int count = varRecords.len;
3767 for (unsigned int i = 0; i < count; i++)
3769 c->cur_record_idx = i;
3770 varRecords[i].collect_feature_substitutes_with_variations (c, this);
3774 if (c->variation_applied && !c->universal &&
3775 !c->record_cond_idx_map->is_empty ())
3776 c->insert_catch_all_feature_variation_record = true;
3779 FeatureVariations* copy (hb_serialize_context_t *c) const
3781 TRACE_SERIALIZE (this);
3782 return_trace (c->embed (*this));
3785 void collect_lookups (const hb_set_t *feature_indexes,
3786 const hb_hashmap_t<unsigned, const Feature*> *feature_substitutes_map,
3787 hb_set_t *lookup_indexes /* OUT */) const
3789 for (const FeatureVariationRecord& r : varRecords)
3790 r.collect_lookups (this, feature_indexes, feature_substitutes_map, lookup_indexes);
3793 void closure_features (const hb_map_t *lookup_indexes,
3794 const hb_hashmap_t<unsigned, hb::shared_ptr<hb_set_t>> *feature_record_cond_idx_map,
3795 hb_set_t *feature_indexes /* OUT */) const
3797 unsigned int count = varRecords.len;
3798 for (unsigned int i = 0; i < count; i++)
3800 if (feature_record_cond_idx_map != nullptr &&
3801 !feature_record_cond_idx_map->has (i))
3803 varRecords[i].closure_features (this, lookup_indexes, feature_indexes);
3807 bool subset (hb_subset_context_t *c,
3808 hb_subset_layout_context_t *l) const
3810 TRACE_SUBSET (this);
3811 auto *out = c->serializer->start_embed (*this);
3812 if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);
3814 out->version.major = version.major;
3815 out->version.minor = version.minor;
3817 int keep_up_to = -1;
3818 for (int i = varRecords.len - 1; i >= 0; i--) {
3819 if (varRecords[i].intersects_features (this, l->feature_index_map)) {
3825 unsigned count = (unsigned) (keep_up_to + 1);
3826 for (unsigned i = 0; i < count; i++)
3828 if (l->feature_record_cond_idx_map != nullptr &&
3829 !l->feature_record_cond_idx_map->has (i))
3832 l->cur_feature_var_record_idx = i;
3833 subset_record_array (l, &(out->varRecords), this) (varRecords[i]);
3835 return_trace (bool (out->varRecords));
3838 bool sanitize (hb_sanitize_context_t *c) const
3840 TRACE_SANITIZE (this);
3841 return_trace (version.sanitize (c) &&
3842 likely (version.major == 1) &&
3843 varRecords.sanitize (c, this));
3847 FixedVersion<> version; /* Version--0x00010000u */
3848 Array32Of<FeatureVariationRecord>
3851 DEFINE_SIZE_ARRAY_SIZED (8, varRecords);
3859 struct HintingDevice
3861 friend struct Device;
3865 hb_position_t get_x_delta (hb_font_t *font) const
3866 { return get_delta (font->x_ppem, font->x_scale); }
3868 hb_position_t get_y_delta (hb_font_t *font) const
3869 { return get_delta (font->y_ppem, font->y_scale); }
3873 unsigned int get_size () const
3875 unsigned int f = deltaFormat;
3876 if (unlikely (f < 1 || f > 3 || startSize > endSize)) return 3 * HBUINT16::static_size;
3877 return HBUINT16::static_size * (4 + ((endSize - startSize) >> (4 - f)));
3880 bool sanitize (hb_sanitize_context_t *c) const
3882 TRACE_SANITIZE (this);
3883 return_trace (c->check_struct (this) && c->check_range (this, this->get_size ()));
3886 HintingDevice* copy (hb_serialize_context_t *c) const
3888 TRACE_SERIALIZE (this);
3889 return_trace (c->embed<HintingDevice> (this));
3894 int get_delta (unsigned int ppem, int scale) const
3896 if (!ppem) return 0;
3898 int pixels = get_delta_pixels (ppem);
3900 if (!pixels) return 0;
3902 return (int) (pixels * (int64_t) scale / ppem);
3904 int get_delta_pixels (unsigned int ppem_size) const
3906 unsigned int f = deltaFormat;
3907 if (unlikely (f < 1 || f > 3))
3910 if (ppem_size < startSize || ppem_size > endSize)
3913 unsigned int s = ppem_size - startSize;
3915 unsigned int byte = deltaValueZ[s >> (4 - f)];
3916 unsigned int bits = (byte >> (16 - (((s & ((1 << (4 - f)) - 1)) + 1) << f)));
3917 unsigned int mask = (0xFFFFu >> (16 - (1 << f)));
3919 int delta = bits & mask;
3921 if ((unsigned int) delta >= ((mask + 1) >> 1))
3928 HBUINT16 startSize; /* Smallest size to correct--in ppem */
3929 HBUINT16 endSize; /* Largest size to correct--in ppem */
3930 HBUINT16 deltaFormat; /* Format of DeltaValue array data: 1, 2, or 3
3931 * 1 Signed 2-bit value, 8 values per uint16
3932 * 2 Signed 4-bit value, 4 values per uint16
3933 * 3 Signed 8-bit value, 2 values per uint16
3935 UnsizedArrayOf<HBUINT16>
3936 deltaValueZ; /* Array of compressed data */
3938 DEFINE_SIZE_ARRAY (6, deltaValueZ);
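/* Worked example (illustrative; the packed value is hypothetical): with
 * deltaFormat = 2 (signed 4-bit deltas, four per uint16), startSize = 10,
 * endSize = 13 and deltaValueZ[0] = 0x12E0, ppem sizes 10..13 map to the nibbles
 * 0x1, 0x2, 0xE, 0x0, i.e. pixel corrections of +1, +2, -2 (0xE sign-extends to
 * -2) and 0; each is then scaled by scale / ppem in get_delta (). */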
3941 struct VariationDevice
3943 friend struct Device;
3947 hb_position_t get_x_delta (hb_font_t *font,
3948 const VariationStore &store,
3949 VariationStore::cache_t *store_cache = nullptr) const
3950 { return font->em_scalef_x (get_delta (font, store, store_cache)); }
3952 hb_position_t get_y_delta (hb_font_t *font,
3953 const VariationStore &store,
3954 VariationStore::cache_t *store_cache = nullptr) const
3955 { return font->em_scalef_y (get_delta (font, store, store_cache)); }
3957 VariationDevice* copy (hb_serialize_context_t *c,
3958 const hb_hashmap_t<unsigned, hb_pair_t<unsigned, int>> *layout_variation_idx_delta_map) const
3960 TRACE_SERIALIZE (this);
3961 if (!layout_variation_idx_delta_map) return_trace (nullptr);
3963 hb_pair_t<unsigned, int> *v;
3964 if (!layout_variation_idx_delta_map->has (varIdx, &v))
3965 return_trace (nullptr);
3967 c->start_zerocopy (this->static_size);
3968 auto *out = c->embed (this);
3969 if (unlikely (!out)) return_trace (nullptr);
3971 if (!c->check_assign (out->varIdx, hb_first (*v), HB_SERIALIZE_ERROR_INT_OVERFLOW))
3972 return_trace (nullptr);
3976 void collect_variation_index (hb_collect_variation_indices_context_t *c) const
3977 { c->layout_variation_indices->add (varIdx); }
3979 bool sanitize (hb_sanitize_context_t *c) const
3981 TRACE_SANITIZE (this);
3982 return_trace (c->check_struct (this));
3987 float get_delta (hb_font_t *font,
3988 const VariationStore &store,
3989 VariationStore::cache_t *store_cache = nullptr) const
3991 return store.get_delta (varIdx, font->coords, font->num_coords, (VariationStore::cache_t *) store_cache);
3996 HBUINT16 deltaFormat; /* Format identifier for this table: 0x8000 */
3998 DEFINE_SIZE_STATIC (6);
4007 HBUINT16 format; /* Format identifier */
4009 DEFINE_SIZE_STATIC (6);
4014 hb_position_t get_x_delta (hb_font_t *font,
4015 const VariationStore &store=Null (VariationStore),
4016 VariationStore::cache_t *store_cache = nullptr) const
4020 #ifndef HB_NO_HINTING
4021 case 1: case 2: case 3:
4022 return u.hinting.get_x_delta (font);
4026 return u.variation.get_x_delta (font, store, store_cache);
4032 hb_position_t get_y_delta (hb_font_t *font,
4033 const VariationStore &store=Null (VariationStore),
4034 VariationStore::cache_t *store_cache = nullptr) const
4038 case 1: case 2: case 3:
4039 #ifndef HB_NO_HINTING
4040 return u.hinting.get_y_delta (font);
4044 return u.variation.get_y_delta (font, store, store_cache);
4051 bool sanitize (hb_sanitize_context_t *c) const
4053 TRACE_SANITIZE (this);
4054 if (!u.b.format.sanitize (c)) return_trace (false);
4055 switch (u.b.format) {
4056 #ifndef HB_NO_HINTING
4057 case 1: case 2: case 3:
4058 return_trace (u.hinting.sanitize (c));
4062 return_trace (u.variation.sanitize (c));
4065 return_trace (true);
4069 Device* copy (hb_serialize_context_t *c,
4070 const hb_hashmap_t<unsigned, hb_pair_t<unsigned, int>> *layout_variation_idx_delta_map=nullptr) const
4072 TRACE_SERIALIZE (this);
4073 switch (u.b.format) {
4074 #ifndef HB_NO_HINTING
4078 return_trace (reinterpret_cast<Device *> (u.hinting.copy (c)));
4082 return_trace (reinterpret_cast<Device *> (u.variation.copy (c, layout_variation_idx_delta_map)));
4085 return_trace (nullptr);
4089 void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
4091 switch (u.b.format) {
4092 #ifndef HB_NO_HINTING
4100 u.variation.collect_variation_index (c);
4108 unsigned get_variation_index () const
4110 switch (u.b.format) {
4113 return u.variation.varIdx;
4116 return HB_OT_LAYOUT_NO_VARIATIONS_INDEX;
4123 HintingDevice hinting;
4125 VariationDevice variation;
4129 DEFINE_SIZE_UNION (6, b);
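  /* Illustrative sketch (assumes `device`, `font` and `store` come from the caller):
   * formats 1..3 resolve against the ppem-indexed HintingDevice data, while format
   * 0x8000 is a VariationDevice resolved through the VariationStore:
   *
   *   hb_position_t dx = device.get_x_delta (font, store);  // dispatches on u.b.format
   */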
4133 } /* namespace OT */
4136 #endif /* HB_OT_LAYOUT_COMMON_HH */