2 * Copyright © 2007,2008,2009 Red Hat, Inc.
3 * Copyright © 2010,2012 Google, Inc.
5 * This is part of HarfBuzz, a text shaping library.
7 * Permission is hereby granted, without written agreement and without
8 * license or royalty fees, to use, copy, modify, and distribute this
9 * software and its documentation for any purpose, provided that the
10 * above copyright notice and the following two paragraphs appear in
11 * all copies of this software.
13 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
14 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
15 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
16 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
19 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
20 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
21 * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
22 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
23 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
25 * Red Hat Author(s): Behdad Esfahbod
26 * Google Author(s): Behdad Esfahbod
29 #ifndef HB_OT_LAYOUT_COMMON_HH
30 #define HB_OT_LAYOUT_COMMON_HH
33 #include "hb-ot-layout.hh"
34 #include "hb-open-type.hh"
36 #include "hb-bimap.hh"
39 #ifndef HB_MAX_NESTING_LEVEL
40 #define HB_MAX_NESTING_LEVEL 6
42 #ifndef HB_MAX_CONTEXT_LENGTH
43 #define HB_MAX_CONTEXT_LENGTH 64
45 #ifndef HB_CLOSURE_MAX_STAGES
47 * The maximum number of times a lookup can be applied during shaping.
48 * Used to limit the number of iterations of the closure algorithm.
49 * This must be larger than the number of times add_pause() is
50 * called in a collect_features call of any shaper.
52 #define HB_CLOSURE_MAX_STAGES 32
55 #ifndef HB_MAX_SCRIPTS
56 #define HB_MAX_SCRIPTS 500
59 #ifndef HB_MAX_LANGSYS
60 #define HB_MAX_LANGSYS 2000
63 #ifndef HB_MAX_FEATURES
64 #define HB_MAX_FEATURES 750
67 #ifndef HB_MAX_FEATURE_INDICES
68 #define HB_MAX_FEATURE_INDICES 1500
71 #ifndef HB_MAX_LOOKUP_VISIT_COUNT
72 #define HB_MAX_LOOKUP_VISIT_COUNT 35000
79 #define NOT_COVERED ((unsigned int) -1)
82 template<typename Iterator>
83 static inline void Coverage_serialize (hb_serialize_context_t *c,
86 template<typename Iterator>
87 static inline void ClassDef_serialize (hb_serialize_context_t *c,
90 static void ClassDef_remap_and_serialize (hb_serialize_context_t *c,
91 const hb_map_t &gid_klass_map,
92 hb_sorted_vector_t<HBGlyphID16> &glyphs,
93 const hb_set_t &klasses,
95 hb_map_t *klass_map /*INOUT*/);
98 struct hb_prune_langsys_context_t
100 hb_prune_langsys_context_t (const void *table_,
101 hb_hashmap_t<unsigned, hb_set_t *> *script_langsys_map_,
102 const hb_map_t *duplicate_feature_map_,
103 hb_set_t *new_collected_feature_indexes_)
105 script_langsys_map (script_langsys_map_),
106 duplicate_feature_map (duplicate_feature_map_),
107 new_feature_indexes (new_collected_feature_indexes_),
108 script_count (0),langsys_count (0) {}
110 bool visitedScript (const void *s)
112 if (script_count++ > HB_MAX_SCRIPTS)
115 return visited (s, visited_script);
118 bool visitedLangsys (const void *l)
120 if (langsys_count++ > HB_MAX_LANGSYS)
123 return visited (l, visited_langsys);
127 template <typename T>
128 bool visited (const T *p, hb_set_t &visited_set)
130 hb_codepoint_t delta = (hb_codepoint_t) ((uintptr_t) p - (uintptr_t) table);
131 if (visited_set.in_error () || visited_set.has (delta))
134 visited_set.add (delta);
140 hb_hashmap_t<unsigned, hb_set_t *> *script_langsys_map;
141 const hb_map_t *duplicate_feature_map;
142 hb_set_t *new_feature_indexes;
145 hb_set_t visited_script;
146 hb_set_t visited_langsys;
147 unsigned script_count;
148 unsigned langsys_count;
151 struct hb_subset_layout_context_t :
152 hb_dispatch_context_t<hb_subset_layout_context_t, hb_empty_t, HB_DEBUG_SUBSET>
154 const char *get_name () { return "SUBSET_LAYOUT"; }
155 static return_t default_return_value () { return hb_empty_t (); }
159 return script_count++ < HB_MAX_SCRIPTS;
164 return langsys_count++ < HB_MAX_LANGSYS;
167 bool visitFeatureIndex (int count)
169 feature_index_count += count;
170 return feature_index_count < HB_MAX_FEATURE_INDICES;
173 bool visitLookupIndex()
175 lookup_index_count++;
176 return lookup_index_count < HB_MAX_LOOKUP_VISIT_COUNT;
179 hb_subset_context_t *subset_context;
180 const hb_tag_t table_tag;
181 const hb_map_t *lookup_index_map;
182 const hb_hashmap_t<unsigned, hb_set_t *> *script_langsys_map;
183 const hb_map_t *feature_index_map;
184 unsigned cur_script_index;
186 hb_subset_layout_context_t (hb_subset_context_t *c_,
188 hb_map_t *lookup_map_,
189 hb_hashmap_t<unsigned, hb_set_t *> *script_langsys_map_,
190 hb_map_t *feature_index_map_) :
193 lookup_index_map (lookup_map_),
194 script_langsys_map (script_langsys_map_),
195 feature_index_map (feature_index_map_),
196 cur_script_index (0xFFFFu),
199 feature_index_count (0),
200 lookup_index_count (0)
204 unsigned script_count;
205 unsigned langsys_count;
206 unsigned feature_index_count;
207 unsigned lookup_index_count;
210 struct hb_collect_variation_indices_context_t :
211 hb_dispatch_context_t<hb_collect_variation_indices_context_t>
213 template <typename T>
214 return_t dispatch (const T &obj) { obj.collect_variation_indices (this); return hb_empty_t (); }
215 static return_t default_return_value () { return hb_empty_t (); }
217 hb_set_t *layout_variation_indices;
218 const hb_set_t *glyph_set;
219 const hb_map_t *gpos_lookups;
221 hb_collect_variation_indices_context_t (hb_set_t *layout_variation_indices_,
222 const hb_set_t *glyph_set_,
223 const hb_map_t *gpos_lookups_) :
224 layout_variation_indices (layout_variation_indices_),
225 glyph_set (glyph_set_),
226 gpos_lookups (gpos_lookups_) {}
229 template<typename OutputArray>
230 struct subset_offset_array_t
232 subset_offset_array_t (hb_subset_context_t *subset_context_,
234 const void *base_) : subset_context (subset_context_),
235 out (out_), base (base_) {}
237 template <typename T>
238 bool operator () (T&& offset)
240 auto snap = subset_context->serializer->snapshot ();
241 auto *o = out.serialize_append (subset_context->serializer);
242 if (unlikely (!o)) return false;
243 bool ret = o->serialize_subset (subset_context, offset, base);
247 subset_context->serializer->revert (snap);
253 hb_subset_context_t *subset_context;
259 template<typename OutputArray, typename Arg>
260 struct subset_offset_array_arg_t
262 subset_offset_array_arg_t (hb_subset_context_t *subset_context_,
265 Arg &&arg_) : subset_context (subset_context_), out (out_),
266 base (base_), arg (arg_) {}
268 template <typename T>
269 bool operator () (T&& offset)
271 auto snap = subset_context->serializer->snapshot ();
272 auto *o = out.serialize_append (subset_context->serializer);
273 if (unlikely (!o)) return false;
274 bool ret = o->serialize_subset (subset_context, offset, base, arg);
278 subset_context->serializer->revert (snap);
284 hb_subset_context_t *subset_context;
291 * Helper to subset an array of offsets. Subsets the thing pointed to by each offset
292 * and discards the offset in the array if the subset operation results in an empty
297 template<typename OutputArray>
298 subset_offset_array_t<OutputArray>
299 operator () (hb_subset_context_t *subset_context, OutputArray& out,
300 const void *base) const
301 { return subset_offset_array_t<OutputArray> (subset_context, out, base); }
303 /* Variant with one extra argument passed to serialize_subset */
304 template<typename OutputArray, typename Arg>
305 subset_offset_array_arg_t<OutputArray, Arg>
306 operator () (hb_subset_context_t *subset_context, OutputArray& out,
307 const void *base, Arg &&arg) const
308 { return subset_offset_array_arg_t<OutputArray, Arg> (subset_context, out, base, arg); }
310 HB_FUNCOBJ (subset_offset_array);
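/* Illustrative sketch (not part of the library): subset_offset_array is a
 * function object meant to be applied over an iterator of offsets inside a
 * dagger pipeline; each offset's target is subset, and the offset is dropped
 * from the output array if the target subsets to nothing.  A typical use
 * (mirroring Lookup::subset further below) looks like:
 *
 *   + hb_iter (offsets)
 *   | hb_filter (...)                                   // keep wanted entries
 *   | hb_apply (subset_offset_array (c, out->offsets, this))
 *   ;
 *
 * 'offsets' here is a placeholder for an Array16OfOffset16To<> member. */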
312 template<typename OutputArray>
313 struct subset_record_array_t
315 subset_record_array_t (hb_subset_layout_context_t *c_, OutputArray* out_,
316 const void *base_) : subset_layout_context (c_),
317 out (out_), base (base_) {}
319 template <typename T>
321 operator () (T&& record)
323 auto snap = subset_layout_context->subset_context->serializer->snapshot ();
324 bool ret = record.subset (subset_layout_context, base);
325 if (!ret) subset_layout_context->subset_context->serializer->revert (snap);
330 hb_subset_layout_context_t *subset_layout_context;
336 * Helper to subset a RecordList/record array. Subsets each Record in the array and
337 * discards the record if the subset operation returns false.
341 template<typename OutputArray>
342 subset_record_array_t<OutputArray>
343 operator () (hb_subset_layout_context_t *c, OutputArray* out,
344 const void *base) const
345 { return subset_record_array_t<OutputArray> (c, out, base); }
347 HB_FUNCOBJ (subset_record_array);
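/* Illustrative sketch (not part of the library): subset_record_array is the
 * Record<> counterpart of subset_offset_array, applied over Record entries
 * rather than bare offsets; a record is dropped when its subset() returns
 * false.  RecordListOf<Type>::subset below uses it essentially as:
 *
 *   + hb_iter (*this)
 *   | hb_apply (subset_record_array (l, out, this))
 *   ;
 */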
350 template<typename OutputArray>
351 struct serialize_math_record_array_t
353 serialize_math_record_array_t (hb_serialize_context_t *serialize_context_,
355 const void *base_) : serialize_context (serialize_context_),
356 out (out_), base (base_) {}
358 template <typename T>
359 bool operator () (T&& record)
361 if (!serialize_context->copy (record, base)) return false;
367 hb_serialize_context_t *serialize_context;
373 * Helper to serialize an array of MATH records.
377 template<typename OutputArray>
378 serialize_math_record_array_t<OutputArray>
379 operator () (hb_serialize_context_t *serialize_context, OutputArray& out,
380 const void *base) const
381 { return serialize_math_record_array_t<OutputArray> (serialize_context, out, base); }
384 HB_FUNCOBJ (serialize_math_record_array);
388 * OpenType Layout Common Table Formats
394 * Script, ScriptList, LangSys, Feature, FeatureList, Lookup, LookupList
397 struct Record_sanitize_closure_t {
399 const void *list_base;
402 template <typename Type>
405 int cmp (hb_tag_t a) const { return tag.cmp (a); }
407 bool subset (hb_subset_layout_context_t *c, const void *base) const
410 auto *out = c->subset_context->serializer->embed (this);
411 if (unlikely (!out)) return_trace (false);
412 bool ret = out->offset.serialize_subset (c->subset_context, offset, base, c, &tag);
416 bool sanitize (hb_sanitize_context_t *c, const void *base) const
418 TRACE_SANITIZE (this);
419 const Record_sanitize_closure_t closure = {tag, base};
420 return_trace (c->check_struct (this) && offset.sanitize (c, base, &closure));
423 Tag tag; /* 4-byte Tag identifier */
425 offset; /* Offset from beginning of object holding
428 DEFINE_SIZE_STATIC (6);
431 template <typename Type>
432 struct RecordArrayOf : SortedArray16Of<Record<Type>>
434 const Offset16To<Type>& get_offset (unsigned int i) const
435 { return (*this)[i].offset; }
436 Offset16To<Type>& get_offset (unsigned int i)
437 { return (*this)[i].offset; }
438 const Tag& get_tag (unsigned int i) const
439 { return (*this)[i].tag; }
440 unsigned int get_tags (unsigned int start_offset,
441 unsigned int *record_count /* IN/OUT */,
442 hb_tag_t *record_tags /* OUT */) const
446 + this->sub_array (start_offset, record_count)
447 | hb_map (&Record<Type>::tag)
448 | hb_sink (hb_array (record_tags, *record_count))
453 bool find_index (hb_tag_t tag, unsigned int *index) const
455 return this->bfind (tag, index, HB_NOT_FOUND_STORE, Index::NOT_FOUND_INDEX);
459 template <typename Type>
460 struct RecordListOf : RecordArrayOf<Type>
462 const Type& operator [] (unsigned int i) const
463 { return this+this->get_offset (i); }
465 bool subset (hb_subset_context_t *c,
466 hb_subset_layout_context_t *l) const
469 auto *out = c->serializer->start_embed (*this);
470 if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
473 | hb_apply (subset_record_array (l, out, this))
478 bool sanitize (hb_sanitize_context_t *c) const
480 TRACE_SANITIZE (this);
481 return_trace (RecordArrayOf<Type>::sanitize (c, this));
487 struct RecordListOfFeature : RecordListOf<Feature>
489 bool subset (hb_subset_context_t *c,
490 hb_subset_layout_context_t *l) const
493 auto *out = c->serializer->start_embed (*this);
494 if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);
496 unsigned count = this->len;
497 + hb_zip (*this, hb_range (count))
498 | hb_filter (l->feature_index_map, hb_second)
500 | hb_apply (subset_record_array (l, out, this))
507 struct RecordListOfScript : RecordListOf<Script>
509 bool subset (hb_subset_context_t *c,
510 hb_subset_layout_context_t *l) const
513 auto *out = c->serializer->start_embed (*this);
514 if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);
516 unsigned count = this->len;
517 for (auto _ : + hb_zip (*this, hb_range (count)))
519 auto snap = c->serializer->snapshot ();
520 l->cur_script_index = _.second;
521 bool ret = _.first.subset (l, this);
522 if (!ret) c->serializer->revert (snap);
532 int cmp (hb_codepoint_t g) const
533 { return g < first ? -1 : g <= last ? 0 : +1; }
535 bool sanitize (hb_sanitize_context_t *c) const
537 TRACE_SANITIZE (this);
538 return_trace (c->check_struct (this));
541 bool intersects (const hb_set_t *glyphs) const
542 { return glyphs->intersects (first, last); }
544 template <typename set_t>
545 bool collect_coverage (set_t *glyphs) const
546 { return glyphs->add_range (first, last); }
548 HBGlyphID16 first; /* First GlyphID in the range */
549 HBGlyphID16 last; /* Last GlyphID in the range */
550 HBUINT16 value; /* Value */
552 DEFINE_SIZE_STATIC (6);
554 DECLARE_NULL_NAMESPACE_BYTES (OT, RangeRecord);
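/* Worked example (illustrative, not part of the library): RangeRecord::cmp
 * keys a binary search by glyph id.  For a record with first = 10 and
 * last = 20:
 *
 *   cmp (9)  == -1   // before the range
 *   cmp (10) ==  0   // inside
 *   cmp (20) ==  0   // inside
 *   cmp (21) == +1   // after the range
 */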
557 struct IndexArray : Array16Of<Index>
559 bool intersects (const hb_map_t *indexes) const
560 { return hb_any (*this, indexes); }
562 template <typename Iterator,
563 hb_requires (hb_is_iterator (Iterator))>
564 void serialize (hb_serialize_context_t *c,
565 hb_subset_layout_context_t *l,
569 if (unlikely (!c->extend_min ((*this)))) return;
571 for (const auto _ : it)
573 if (!l->visitLookupIndex()) break;
582 unsigned int get_indexes (unsigned int start_offset,
583 unsigned int *_count /* IN/OUT */,
584 unsigned int *_indexes /* OUT */) const
588 + this->sub_array (start_offset, _count)
589 | hb_sink (hb_array (_indexes, *_count))
595 void add_indexes_to (hb_set_t* output /* OUT */) const
597 output->add_array (as_array ());
604 unsigned int get_feature_count () const
605 { return featureIndex.len; }
606 hb_tag_t get_feature_index (unsigned int i) const
607 { return featureIndex[i]; }
608 unsigned int get_feature_indexes (unsigned int start_offset,
609 unsigned int *feature_count /* IN/OUT */,
610 unsigned int *feature_indexes /* OUT */) const
611 { return featureIndex.get_indexes (start_offset, feature_count, feature_indexes); }
612 void add_feature_indexes_to (hb_set_t *feature_indexes) const
613 { featureIndex.add_indexes_to (feature_indexes); }
615 bool has_required_feature () const { return reqFeatureIndex != 0xFFFFu; }
616 unsigned int get_required_feature_index () const
618 if (reqFeatureIndex == 0xFFFFu)
619 return Index::NOT_FOUND_INDEX;
620 return reqFeatureIndex;
623 LangSys* copy (hb_serialize_context_t *c) const
625 TRACE_SERIALIZE (this);
626 return_trace (c->embed (*this));
629 bool compare (const LangSys& o, const hb_map_t *feature_index_map) const
631 if (reqFeatureIndex != o.reqFeatureIndex)
635 + hb_iter (featureIndex)
636 | hb_filter (feature_index_map)
637 | hb_map (feature_index_map)
641 + hb_iter (o.featureIndex)
642 | hb_filter (feature_index_map)
643 | hb_map (feature_index_map)
646 if (iter.len () != o_iter.len ())
649 for (const auto _ : + hb_zip (iter, o_iter))
650 if (_.first != _.second) return false;
655 void collect_features (hb_prune_langsys_context_t *c) const
657 if (!has_required_feature () && !get_feature_count ()) return;
658 if (has_required_feature () &&
659 c->duplicate_feature_map->has (reqFeatureIndex))
660 c->new_feature_indexes->add (get_required_feature_index ());
662 + hb_iter (featureIndex)
663 | hb_filter (c->duplicate_feature_map)
664 | hb_sink (c->new_feature_indexes)
668 bool subset (hb_subset_context_t *c,
669 hb_subset_layout_context_t *l,
670 const Tag *tag = nullptr) const
673 auto *out = c->serializer->start_embed (*this);
674 if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);
676 out->reqFeatureIndex = l->feature_index_map->has (reqFeatureIndex) ? l->feature_index_map->get (reqFeatureIndex) : 0xFFFFu;
678 if (!l->visitFeatureIndex (featureIndex.len))
679 return_trace (false);
682 + hb_iter (featureIndex)
683 | hb_filter (l->feature_index_map)
684 | hb_map (l->feature_index_map)
687 bool ret = bool (it);
688 out->featureIndex.serialize (c->serializer, l, it);
692 bool sanitize (hb_sanitize_context_t *c,
693 const Record_sanitize_closure_t * = nullptr) const
695 TRACE_SANITIZE (this);
696 return_trace (c->check_struct (this) && featureIndex.sanitize (c));
699 Offset16 lookupOrderZ; /* = Null (reserved for an offset to a
700 * reordering table) */
701 HBUINT16 reqFeatureIndex;/* Index of a feature required for this
702 * language system--if no required features
704 IndexArray featureIndex; /* Array of indices into the FeatureList */
706 DEFINE_SIZE_ARRAY_SIZED (6, featureIndex);
708 DECLARE_NULL_NAMESPACE_BYTES (OT, LangSys);
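/* Illustrative note (not part of the library): a reqFeatureIndex of 0xFFFFu
 * is the "no required feature" sentinel; has_required_feature () tests for
 * it, get_required_feature_index () maps it to Index::NOT_FOUND_INDEX, and
 * LangSys::subset () writes it back out when the required feature is not
 * retained. */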
712 unsigned int get_lang_sys_count () const
713 { return langSys.len; }
714 const Tag& get_lang_sys_tag (unsigned int i) const
715 { return langSys.get_tag (i); }
716 unsigned int get_lang_sys_tags (unsigned int start_offset,
717 unsigned int *lang_sys_count /* IN/OUT */,
718 hb_tag_t *lang_sys_tags /* OUT */) const
719 { return langSys.get_tags (start_offset, lang_sys_count, lang_sys_tags); }
720 const LangSys& get_lang_sys (unsigned int i) const
722 if (i == Index::NOT_FOUND_INDEX) return get_default_lang_sys ();
723 return this+langSys[i].offset;
725 bool find_lang_sys_index (hb_tag_t tag, unsigned int *index) const
726 { return langSys.find_index (tag, index); }
728 bool has_default_lang_sys () const { return defaultLangSys != 0; }
729 const LangSys& get_default_lang_sys () const { return this+defaultLangSys; }
731 void prune_langsys (hb_prune_langsys_context_t *c,
732 unsigned script_index) const
734 if (!has_default_lang_sys () && !get_lang_sys_count ()) return;
735 if (c->visitedScript (this)) return;
737 if (!c->script_langsys_map->has (script_index))
739 hb_set_t* empty_set = hb_set_create ();
740 if (unlikely (!c->script_langsys_map->set (script_index, empty_set)))
742 hb_set_destroy (empty_set);
747 unsigned langsys_count = get_lang_sys_count ();
748 if (has_default_lang_sys ())
750 //only collect features from non-redundant langsys
751 const LangSys& d = get_default_lang_sys ();
752 if (!c->visitedLangsys (&d)) {
753 d.collect_features (c);
756 for (auto _ : + hb_zip (langSys, hb_range (langsys_count)))
759 const LangSys& l = this+_.first.offset;
760 if (c->visitedLangsys (&l)) continue;
761 if (l.compare (d, c->duplicate_feature_map)) continue;
763 l.collect_features (c);
764 c->script_langsys_map->get (script_index)->add (_.second);
769 for (auto _ : + hb_zip (langSys, hb_range (langsys_count)))
771 const LangSys& l = this+_.first.offset;
772 if (c->visitedLangsys (&l)) continue;
773 l.collect_features (c);
774 c->script_langsys_map->get (script_index)->add (_.second);
779 bool subset (hb_subset_context_t *c,
780 hb_subset_layout_context_t *l,
781 const Tag *tag) const
784 if (!l->visitScript ()) return_trace (false);
786 auto *out = c->serializer->start_embed (*this);
787 if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);
789 bool defaultLang = false;
790 if (has_default_lang_sys ())
792 c->serializer->push ();
793 const LangSys& ls = this+defaultLangSys;
794 bool ret = ls.subset (c, l);
795 if (!ret && tag && *tag != HB_TAG ('D', 'F', 'L', 'T'))
797 c->serializer->pop_discard ();
798 out->defaultLangSys = 0;
802 c->serializer->add_link (out->defaultLangSys, c->serializer->pop_pack ());
807 const hb_set_t *active_langsys = l->script_langsys_map->get (l->cur_script_index);
810 unsigned count = langSys.len;
811 + hb_zip (langSys, hb_range (count))
812 | hb_filter (active_langsys, hb_second)
814 | hb_filter ([=] (const Record<LangSys>& record) {return l->visitLangSys (); })
815 | hb_apply (subset_record_array (l, &(out->langSys), this))
819 return_trace (bool (out->langSys.len) || defaultLang || l->table_tag == HB_OT_TAG_GSUB);
822 bool sanitize (hb_sanitize_context_t *c,
823 const Record_sanitize_closure_t * = nullptr) const
825 TRACE_SANITIZE (this);
826 return_trace (defaultLangSys.sanitize (c, this) && langSys.sanitize (c, this));
831 defaultLangSys; /* Offset to DefaultLangSys table--from
832 * beginning of Script table--may be Null */
833 RecordArrayOf<LangSys>
834 langSys; /* Array of LangSysRecords--listed
835 * alphabetically by LangSysTag */
837 DEFINE_SIZE_ARRAY_SIZED (4, langSys);
840 typedef RecordListOfScript ScriptList;
843 /* https://docs.microsoft.com/en-us/typography/opentype/spec/features_pt#size */
844 struct FeatureParamsSize
846 bool sanitize (hb_sanitize_context_t *c) const
848 TRACE_SANITIZE (this);
849 if (unlikely (!c->check_struct (this))) return_trace (false);
851 /* This subtable has some "history", if you will. Some earlier versions of
852 * Adobe tools calculated the offset of the FeatureParams subtable from the
853 * beginning of the FeatureList table! Now, that is dealt with in the
854 * Feature implementation. But we still need to be able to tell junk from
855 * real data. Note: We don't check that the nameID actually exists.
857 * Read Roberts wrote on 9/15/06 on opentype-list@indx.co.uk :
859 * Yes, it is correct that a new version of the AFDKO (version 2.0) will be
860 * coming out soon, and that the makeotf program will build a font with a
861 * 'size' feature that is correct by the specification.
863 * The specification for this feature tag is in the "OpenType Layout Tag
864 * Registry". You can see a copy of this at:
865 * https://docs.microsoft.com/en-us/typography/opentype/spec/features_pt#tag-size
867 * Here is one set of rules to determine if the 'size' feature is built
868 * correctly, or as by the older versions of MakeOTF. You may be able to do
871 * Assume that the offset to the size feature is according to specification,
872 * and make the following value checks. If they fail, assume the size
873 * feature was built the way versions of MakeOTF before AFDKO 2.0 built it.
874 * If that also fails, reject the 'size' feature. The older MakeOTF versions
875 * calculated the offset from the beginning of the FeatureList table, rather
876 * than from the beginning of the 'size' Feature table.
878 * If "design size" == 0:
879 * fails check
881 * Else if ("subfamily identifier" == 0 and
882 * "range start" == 0 and
883 * "range end" == 0 and
885 * "menu name ID" == 0)
886 * passes check: this is the format used when there is a design size
887 * specified, but there is no recommended size range.
889 * Else if ("design size" < "range start" or
890 * "design size" > "range end" or
891 * "range end" <= "range start" or
892 * "menu name ID" < 256 or
893 * "menu name ID" > 32767 or
894 * menu name ID is not a name ID which is actually in the name table)
901 return_trace (false);
902 else if (subfamilyID == 0 &&
903 subfamilyNameID == 0 &&
907 else if (designSize < rangeStart ||
908 designSize > rangeEnd ||
909 subfamilyNameID < 256 ||
910 subfamilyNameID > 32767)
911 return_trace (false);
916 bool subset (hb_subset_context_t *c) const
919 return_trace ((bool) c->serializer->embed (*this));
922 HBUINT16 designSize; /* Represents the design size in 720/inch
923 * units (decipoints). The design size entry
924 * must be non-zero. When there is a design
925 * size but no recommended size range, the
926 * rest of the array will consist of zeros. */
927 HBUINT16 subfamilyID; /* Has no independent meaning, but serves
928 * as an identifier that associates fonts
929 * in a subfamily. All fonts which share a
930 * Preferred or Font Family name and which
931 * differ only by size range shall have the
932 * same subfamily value, and no fonts which
933 * differ in weight or style shall have the
934 * same subfamily value. If this value is
935 * zero, the remaining fields in the array
936 * will be ignored. */
937 NameID subfamilyNameID;/* If the preceding value is non-zero, this
938 * value must be set in the range 256 - 32767
939 * (inclusive). It records the value of a
940 * field in the name table, which must
941 * contain English-language strings encoded
942 * in Windows Unicode and Macintosh Roman,
943 * and may contain additional strings
944 * localized to other scripts and languages.
945 * Each of these strings is the name an
946 * application should use, in combination
947 * with the family name, to represent the
948 * subfamily in a menu. Applications will
949 * choose the appropriate version based on
950 * their selection criteria. */
951 HBUINT16 rangeStart; /* Small end of the recommended usage range
952 * (exclusive), stored in 720/inch units
954 HBUINT16 rangeEnd; /* Large end of the recommended usage range
955 * (inclusive), stored in 720/inch units
958 DEFINE_SIZE_STATIC (10);
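/* Worked example (illustrative, not part of the library): a well-formed
 * 'size' record for a 10pt optical design covering roughly 9pt to 12pt:
 *
 *   designSize      = 100   // 10pt in decipoints (720/inch units)
 *   subfamilyID     = 1
 *   subfamilyNameID = 257   // font-specific 'name' table entry
 *   rangeStart      = 90    // 9pt
 *   rangeEnd        = 120   // 12pt
 *
 * This passes the checks in sanitize () above: designSize is non-zero and
 * inside [rangeStart, rangeEnd], and subfamilyNameID is in 256..32767. */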
961 /* https://docs.microsoft.com/en-us/typography/opentype/spec/features_pt#ssxx */
962 struct FeatureParamsStylisticSet
964 bool sanitize (hb_sanitize_context_t *c) const
966 TRACE_SANITIZE (this);
967 /* Right now minorVersion is at zero, which means any table supports
968 * the uiNameID field. */
969 return_trace (c->check_struct (this));
972 bool subset (hb_subset_context_t *c) const
975 return_trace ((bool) c->serializer->embed (*this));
978 HBUINT16 version; /* (set to 0): This corresponds to a “minor”
979 * version number. Additional data may be
980 * added to the end of this Feature Parameters
981 * table in the future. */
983 NameID uiNameID; /* The 'name' table name ID that specifies a
984 * string (or strings, for multiple languages)
985 * for a user-interface label for this
986 * feature. The values of uiLabelNameId and
987 * sampleTextNameId are expected to be in the
988 * font-specific name ID range (256-32767),
989 * though that is not a requirement in this
990 * Feature Parameters specification. The
991 * user-interface label for the feature can
992 * be provided in multiple languages. An
993 * English string should be included as a
994 * fallback. The string should be kept to a
995 * minimal length to fit comfortably with
996 * different application interfaces. */
998 DEFINE_SIZE_STATIC (4);
1001 /* https://docs.microsoft.com/en-us/typography/opentype/spec/features_ae#cv01-cv99 */
1002 struct FeatureParamsCharacterVariants
1005 get_characters (unsigned start_offset, unsigned *char_count, hb_codepoint_t *chars) const
1009 + characters.sub_array (start_offset, char_count)
1010 | hb_sink (hb_array (chars, *char_count))
1013 return characters.len;
1016 unsigned get_size () const
1017 { return min_size + characters.len * HBUINT24::static_size; }
1019 bool subset (hb_subset_context_t *c) const
1021 TRACE_SUBSET (this);
1022 return_trace ((bool) c->serializer->embed (*this));
1025 bool sanitize (hb_sanitize_context_t *c) const
1027 TRACE_SANITIZE (this);
1028 return_trace (c->check_struct (this) &&
1029 characters.sanitize (c));
1032 HBUINT16 format; /* Format number is set to 0. */
1033 NameID featUILableNameID; /* The ‘name’ table name ID that
1034 * specifies a string (or strings,
1035 * for multiple languages) for a
1036 * user-interface label for this
1037 * feature. (May be NULL.) */
1038 NameID featUITooltipTextNameID;/* The ‘name’ table name ID that
1039 * specifies a string (or strings,
1040 * for multiple languages) that an
1041 * application can use for tooltip
1042 * text for this feature. (May be
1044 NameID sampleTextNameID; /* The ‘name’ table name ID that
1045 * specifies sample text that
1046 * illustrates the effect of this
1047 * feature. (May be NULL.) */
1048 HBUINT16 numNamedParameters; /* Number of named parameters. (May
1050 NameID firstParamUILabelNameID;/* The first ‘name’ table name ID
1051 * used to specify strings for
1052 * user-interface labels for the
1053 * feature parameters. (Must be zero
1054 * if numParameters is zero.) */
1056 characters; /* Array of the Unicode Scalar Value
1057 * of the characters for which this
1058 * feature provides glyph variants.
1061 DEFINE_SIZE_ARRAY (14, characters);
1064 struct FeatureParams
1066 bool sanitize (hb_sanitize_context_t *c, hb_tag_t tag) const
1068 #ifdef HB_NO_LAYOUT_FEATURE_PARAMS
1071 TRACE_SANITIZE (this);
1072 if (tag == HB_TAG ('s','i','z','e'))
1073 return_trace (u.size.sanitize (c));
1074 if ((tag & 0xFFFF0000u) == HB_TAG ('s','s','\0','\0')) /* ssXX */
1075 return_trace (u.stylisticSet.sanitize (c));
1076 if ((tag & 0xFFFF0000u) == HB_TAG ('c','v','\0','\0')) /* cvXX */
1077 return_trace (u.characterVariants.sanitize (c));
1078 return_trace (true);
1081 bool subset (hb_subset_context_t *c, const Tag* tag) const
1083 TRACE_SUBSET (this);
1084 if (!tag) return_trace (false);
1085 if (*tag == HB_TAG ('s','i','z','e'))
1086 return_trace (u.size.subset (c));
1087 if ((*tag & 0xFFFF0000u) == HB_TAG ('s','s','\0','\0')) /* ssXX */
1088 return_trace (u.stylisticSet.subset (c));
1089 if ((*tag & 0xFFFF0000u) == HB_TAG ('c','v','\0','\0')) /* cvXX */
1090 return_trace (u.characterVariants.subset (c));
1091 return_trace (false);
1094 #ifndef HB_NO_LAYOUT_FEATURE_PARAMS
1095 const FeatureParamsSize& get_size_params (hb_tag_t tag) const
1097 if (tag == HB_TAG ('s','i','z','e'))
1099 return Null (FeatureParamsSize);
1101 const FeatureParamsStylisticSet& get_stylistic_set_params (hb_tag_t tag) const
1103 if ((tag & 0xFFFF0000u) == HB_TAG ('s','s','\0','\0')) /* ssXX */
1104 return u.stylisticSet;
1105 return Null (FeatureParamsStylisticSet);
1107 const FeatureParamsCharacterVariants& get_character_variants_params (hb_tag_t tag) const
1109 if ((tag & 0xFFFF0000u) == HB_TAG ('c','v','\0','\0')) /* cvXX */
1110 return u.characterVariants;
1111 return Null (FeatureParamsCharacterVariants);
1117 FeatureParamsSize size;
1118 FeatureParamsStylisticSet stylisticSet;
1119 FeatureParamsCharacterVariants characterVariants;
1122 DEFINE_SIZE_MIN (0);
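/* Illustrative note (not part of the library): the dispatch above keys off
 * the first two bytes of the feature tag, so all stylistic sets (ss01-ss20)
 * and all character variants (cv01-cv99) each share a single branch:
 *
 *   (HB_TAG ('s','s','0','7') & 0xFFFF0000u) == HB_TAG ('s','s','\0','\0')   // true
 *   (HB_TAG ('c','v','4','2') & 0xFFFF0000u) == HB_TAG ('c','v','\0','\0')   // true
 *   (HB_TAG ('l','i','g','a') & 0xFFFF0000u) == HB_TAG ('s','s','\0','\0')   // false
 */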
1127 unsigned int get_lookup_count () const
1128 { return lookupIndex.len; }
1129 hb_tag_t get_lookup_index (unsigned int i) const
1130 { return lookupIndex[i]; }
1131 unsigned int get_lookup_indexes (unsigned int start_index,
1132 unsigned int *lookup_count /* IN/OUT */,
1133 unsigned int *lookup_tags /* OUT */) const
1134 { return lookupIndex.get_indexes (start_index, lookup_count, lookup_tags); }
1135 void add_lookup_indexes_to (hb_set_t *lookup_indexes) const
1136 { lookupIndex.add_indexes_to (lookup_indexes); }
1138 const FeatureParams &get_feature_params () const
1139 { return this+featureParams; }
1141 bool intersects_lookup_indexes (const hb_map_t *lookup_indexes) const
1142 { return lookupIndex.intersects (lookup_indexes); }
1144 bool subset (hb_subset_context_t *c,
1145 hb_subset_layout_context_t *l,
1146 const Tag *tag = nullptr) const
1148 TRACE_SUBSET (this);
1149 auto *out = c->serializer->start_embed (*this);
1150 if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);
1152 out->featureParams.serialize_subset (c, featureParams, this, tag);
1155 + hb_iter (lookupIndex)
1156 | hb_filter (l->lookup_index_map)
1157 | hb_map (l->lookup_index_map)
1160 out->lookupIndex.serialize (c->serializer, l, it);
1161 // The decision to keep or drop this feature is already made before we get here
1162 // so always retain it.
1163 return_trace (true);
1166 bool sanitize (hb_sanitize_context_t *c,
1167 const Record_sanitize_closure_t *closure = nullptr) const
1169 TRACE_SANITIZE (this);
1170 if (unlikely (!(c->check_struct (this) && lookupIndex.sanitize (c))))
1171 return_trace (false);
1173 /* Some earlier versions of Adobe tools calculated the offset of the
1174 * FeatureParams subtable from the beginning of the FeatureList table!
1176 * If sanitizing "failed" for the FeatureParams subtable, try it with the
1177 * alternative location. We would know sanitize "failed" if the old value
1178 * of the offset was non-zero, but it's zeroed now.
1180 * Only do this for the 'size' feature, since at the time of the faulty
1181 * Adobe tools, only the 'size' feature had FeatureParams defined.
1184 if (likely (featureParams.is_null ()))
1185 return_trace (true);
1187 unsigned int orig_offset = featureParams;
1188 if (unlikely (!featureParams.sanitize (c, this, closure ? closure->tag : HB_TAG_NONE)))
1189 return_trace (false);
1191 if (featureParams == 0 && closure &&
1192 closure->tag == HB_TAG ('s','i','z','e') &&
1193 closure->list_base && closure->list_base < this)
1195 unsigned int new_offset_int = orig_offset -
1196 (((char *) this) - ((char *) closure->list_base));
1198 Offset16To<FeatureParams> new_offset;
1199 /* Check that it would not overflow. */
1200 new_offset = new_offset_int;
1201 if (new_offset == new_offset_int &&
1202 c->try_set (&featureParams, new_offset_int) &&
1203 !featureParams.sanitize (c, this, closure ? closure->tag : HB_TAG_NONE))
1204 return_trace (false);
1207 return_trace (true);
1210 Offset16To<FeatureParams>
1211 featureParams; /* Offset to Feature Parameters table (if one
1212 * has been defined for the feature), relative
1213 * to the beginning of the Feature Table; = Null
1214 * if not required */
1215 IndexArray lookupIndex; /* Array of LookupList indices */
1217 DEFINE_SIZE_ARRAY_SIZED (4, lookupIndex);
1220 typedef RecordListOf<Feature> FeatureList;
1223 struct LookupFlag : HBUINT16
1226 RightToLeft = 0x0001u,
1227 IgnoreBaseGlyphs = 0x0002u,
1228 IgnoreLigatures = 0x0004u,
1229 IgnoreMarks = 0x0008u,
1230 IgnoreFlags = 0x000Eu,
1231 UseMarkFilteringSet = 0x0010u,
1233 MarkAttachmentType = 0xFF00u
1236 DEFINE_SIZE_STATIC (2);
1239 } /* namespace OT */
1240 /* This has to be outside the namespace. */
1241 HB_MARK_AS_FLAG_T (OT::LookupFlag::Flags);
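/* Illustrative note (not part of the library): IgnoreFlags is simply the
 * union of the three Ignore* bits:
 *
 *   LookupFlag::IgnoreFlags == (LookupFlag::IgnoreBaseGlyphs |
 *                               LookupFlag::IgnoreLigatures  |
 *                               LookupFlag::IgnoreMarks)       // 0x000Eu
 */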
1246 unsigned int get_subtable_count () const { return subTable.len; }
1248 template <typename TSubTable>
1249 const Array16OfOffset16To<TSubTable>& get_subtables () const
1250 { return reinterpret_cast<const Array16OfOffset16To<TSubTable> &> (subTable); }
1251 template <typename TSubTable>
1252 Array16OfOffset16To<TSubTable>& get_subtables ()
1253 { return reinterpret_cast<Array16OfOffset16To<TSubTable> &> (subTable); }
1255 template <typename TSubTable>
1256 const TSubTable& get_subtable (unsigned int i) const
1257 { return this+get_subtables<TSubTable> ()[i]; }
1258 template <typename TSubTable>
1259 TSubTable& get_subtable (unsigned int i)
1260 { return this+get_subtables<TSubTable> ()[i]; }
1262 unsigned int get_size () const
1264 const HBUINT16 &markFilteringSet = StructAfter<const HBUINT16> (subTable);
1265 if (lookupFlag & LookupFlag::UseMarkFilteringSet)
1266 return (const char *) &StructAfter<const char> (markFilteringSet) - (const char *) this;
1267 return (const char *) &markFilteringSet - (const char *) this;
1270 unsigned int get_type () const { return lookupType; }
1272 /* lookup_props is a 32-bit integer where the lower 16 bits are the LookupFlag and
1273 * the upper 16 bits are the mark-filtering-set index, if the lookup uses one.
1274 * Not to be confused with glyph_props which is very similar. */
1275 uint32_t get_props () const
1277 unsigned int flag = lookupFlag;
1278 if (unlikely (flag & LookupFlag::UseMarkFilteringSet))
1280 const HBUINT16 &markFilteringSet = StructAfter<HBUINT16> (subTable);
1281 flag += (markFilteringSet << 16);
1286 template <typename TSubTable, typename context_t, typename ...Ts>
1287 typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
1289 unsigned int lookup_type = get_type ();
1290 TRACE_DISPATCH (this, lookup_type);
1291 unsigned int count = get_subtable_count ();
1292 for (unsigned int i = 0; i < count; i++) {
1293 typename context_t::return_t r = get_subtable<TSubTable> (i).dispatch (c, lookup_type, std::forward<Ts> (ds)...);
1294 if (c->stop_sublookup_iteration (r))
1297 return_trace (c->default_return_value ());
1300 bool serialize (hb_serialize_context_t *c,
1301 unsigned int lookup_type,
1302 uint32_t lookup_props,
1303 unsigned int num_subtables)
1305 TRACE_SERIALIZE (this);
1306 if (unlikely (!c->extend_min (this))) return_trace (false);
1307 lookupType = lookup_type;
1308 lookupFlag = lookup_props & 0xFFFFu;
1309 if (unlikely (!subTable.serialize (c, num_subtables))) return_trace (false);
1310 if (lookupFlag & LookupFlag::UseMarkFilteringSet)
1312 if (unlikely (!c->extend (this))) return_trace (false);
1313 HBUINT16 &markFilteringSet = StructAfter<HBUINT16> (subTable);
1314 markFilteringSet = lookup_props >> 16;
1316 return_trace (true);
1319 template <typename TSubTable>
1320 bool subset (hb_subset_context_t *c) const
1322 TRACE_SUBSET (this);
1323 auto *out = c->serializer->start_embed (*this);
1324 if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);
1325 out->lookupType = lookupType;
1326 out->lookupFlag = lookupFlag;
1328 const hb_set_t *glyphset = c->plan->glyphset_gsub ();
1329 unsigned int lookup_type = get_type ();
1330 + hb_iter (get_subtables <TSubTable> ())
1331 | hb_filter ([this, glyphset, lookup_type] (const Offset16To<TSubTable> &_) { return (this+_).intersects (glyphset, lookup_type); })
1332 | hb_apply (subset_offset_array (c, out->get_subtables<TSubTable> (), this, lookup_type))
1335 if (lookupFlag & LookupFlag::UseMarkFilteringSet)
1337 if (unlikely (!c->serializer->extend (out))) return_trace (false);
1338 const HBUINT16 &markFilteringSet = StructAfter<HBUINT16> (subTable);
1339 HBUINT16 &outMarkFilteringSet = StructAfter<HBUINT16> (out->subTable);
1340 outMarkFilteringSet = markFilteringSet;
1343 return_trace (out->subTable.len);
1346 template <typename TSubTable>
1347 bool sanitize (hb_sanitize_context_t *c) const
1349 TRACE_SANITIZE (this);
1350 if (!(c->check_struct (this) && subTable.sanitize (c))) return_trace (false);
1352 unsigned subtables = get_subtable_count ();
1353 if (unlikely (!c->visit_subtables (subtables))) return_trace (false);
1355 if (lookupFlag & LookupFlag::UseMarkFilteringSet)
1357 const HBUINT16 &markFilteringSet = StructAfter<HBUINT16> (subTable);
1358 if (!markFilteringSet.sanitize (c)) return_trace (false);
1361 if (unlikely (!get_subtables<TSubTable> ().sanitize (c, this, get_type ())))
1362 return_trace (false);
1364 if (unlikely (get_type () == TSubTable::Extension && subtables && !c->get_edit_count ()))
1366 /* The spec says all subtables of an Extension lookup should
1367 * have the same type, which shall not be the Extension type
1368 * itself (but we already checked for that).
1369 * This is especially important if one has a reverse type!
1371 * We only do this if sanitizer edit_count is zero. Otherwise,
1372 * some of the subtables might have become insane after they
1373 * were sanity-checked by the edits of subsequent subtables.
1374 * https://bugs.chromium.org/p/chromium/issues/detail?id=960331
1376 unsigned int type = get_subtable<TSubTable> (0).u.extension.get_type ();
1377 for (unsigned int i = 1; i < subtables; i++)
1378 if (get_subtable<TSubTable> (i).u.extension.get_type () != type)
1379 return_trace (false);
1381 return_trace (true);
1385 HBUINT16 lookupType; /* Different enumerations for GSUB and GPOS */
1386 HBUINT16 lookupFlag; /* Lookup qualifiers */
1388 subTable; /* Array of SubTables */
1389 /*HBUINT16 markFilteringSetX[HB_VAR_ARRAY];*//* Index (base 0) into GDEF mark glyph sets
1390 * structure. This field is only present if bit
1391 * UseMarkFilteringSet of lookup flags is set. */
1393 DEFINE_SIZE_ARRAY (6, subTable);
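/* Illustrative sketch (not part of the library): how a lookup_props value
 * packs the LookupFlag word and the optional mark-filtering-set index, in
 * the layout that get_props () and serialize () above expect.  The helper
 * name is hypothetical:
 *
 *   static inline uint32_t make_lookup_props (uint16_t flags, uint16_t mark_filtering_set)
 *   {
 *     uint32_t props = flags;
 *     if (flags & OT::LookupFlag::UseMarkFilteringSet)
 *       props |= (uint32_t) mark_filtering_set << 16;  // upper 16 bits
 *     return props;
 *   }
 */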
1396 typedef List16OfOffset16To<Lookup> LookupList;
1398 template <typename TLookup>
1399 struct LookupOffsetList : List16OfOffset16To<TLookup>
1401 bool subset (hb_subset_context_t *c,
1402 hb_subset_layout_context_t *l) const
1404 TRACE_SUBSET (this);
1405 auto *out = c->serializer->start_embed (this);
1406 if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);
1408 unsigned count = this->len;
1409 + hb_zip (*this, hb_range (count))
1410 | hb_filter (l->lookup_index_map, hb_second)
1412 | hb_apply (subset_offset_array (c, *out, this))
1414 return_trace (true);
1417 bool sanitize (hb_sanitize_context_t *c) const
1419 TRACE_SANITIZE (this);
1420 return_trace (List16OfOffset16To<TLookup>::sanitize (c, this));
1429 struct CoverageFormat1
1431 friend struct Coverage;
1434 unsigned int get_coverage (hb_codepoint_t glyph_id) const
1437 glyphArray.bfind (glyph_id, &i, HB_NOT_FOUND_STORE, NOT_COVERED);
1441 template <typename Iterator,
1442 hb_requires (hb_is_sorted_source_of (Iterator, hb_codepoint_t))>
1443 bool serialize (hb_serialize_context_t *c, Iterator glyphs)
1445 TRACE_SERIALIZE (this);
1446 return_trace (glyphArray.serialize (c, glyphs));
1449 bool sanitize (hb_sanitize_context_t *c) const
1451 TRACE_SANITIZE (this);
1452 return_trace (glyphArray.sanitize (c));
1455 bool intersects (const hb_set_t *glyphs) const
1457 /* TODO Speed up, using hb_set_next() and bsearch()? */
1458 for (const auto& g : glyphArray.as_array ())
1459 if (glyphs->has (g))
1463 bool intersects_coverage (const hb_set_t *glyphs, unsigned int index) const
1464 { return glyphs->has (glyphArray[index]); }
1466 void intersected_coverage_glyphs (const hb_set_t *glyphs, hb_set_t *intersect_glyphs) const
1468 unsigned count = glyphArray.len;
1469 for (unsigned i = 0; i < count; i++)
1470 if (glyphs->has (glyphArray[i]))
1471 intersect_glyphs->add (glyphArray[i]);
1474 template <typename set_t>
1475 bool collect_coverage (set_t *glyphs) const
1476 { return glyphs->add_sorted_array (glyphArray.as_array ()); }
1479 /* Older compilers need this to be public. */
1482 void init (const struct CoverageFormat1 &c_) { c = &c_; i = 0; }
1484 bool more () const { return i < c->glyphArray.len; }
1485 void next () { i++; }
1486 hb_codepoint_t get_glyph () const { return c->glyphArray[i]; }
1487 bool operator != (const iter_t& o) const
1488 { return i != o.i || c != o.c; }
1491 const struct CoverageFormat1 *c;
1497 HBUINT16 coverageFormat; /* Format identifier--format = 1 */
1498 SortedArray16Of<HBGlyphID16>
1499 glyphArray; /* Array of GlyphIDs--in numerical order */
1501 DEFINE_SIZE_ARRAY (4, glyphArray);
1504 struct CoverageFormat2
1506 friend struct Coverage;
1509 unsigned int get_coverage (hb_codepoint_t glyph_id) const
1511 const RangeRecord &range = rangeRecord.bsearch (glyph_id);
1512 return likely (range.first <= range.last)
1513 ? (unsigned int) range.value + (glyph_id - range.first)
1517 template <typename Iterator,
1518 hb_requires (hb_is_sorted_source_of (Iterator, hb_codepoint_t))>
1519 bool serialize (hb_serialize_context_t *c, Iterator glyphs)
1521 TRACE_SERIALIZE (this);
1522 if (unlikely (!c->extend_min (this))) return_trace (false);
1524 if (unlikely (!glyphs))
1526 rangeRecord.len = 0;
1527 return_trace (true);
1530 /* TODO(iter) Write more efficiently? */
1532 unsigned num_ranges = 0;
1533 hb_codepoint_t last = (hb_codepoint_t) -2;
1534 for (auto g: glyphs)
1541 if (unlikely (!rangeRecord.serialize (c, num_ranges))) return_trace (false);
1544 unsigned range = (unsigned) -1;
1545 last = (hb_codepoint_t) -2;
1546 for (auto g: glyphs)
1551 rangeRecord[range].first = g;
1552 rangeRecord[range].value = count;
1554 rangeRecord[range].last = g;
1559 return_trace (true);
1562 bool sanitize (hb_sanitize_context_t *c) const
1564 TRACE_SANITIZE (this);
1565 return_trace (rangeRecord.sanitize (c));
1568 bool intersects (const hb_set_t *glyphs) const
1570 /* TODO Speed up, using hb_set_next() and bsearch()? */
1571 /* TODO(iter) Rewrite as dagger. */
1572 for (const auto& range : rangeRecord.as_array ())
1573 if (range.intersects (glyphs))
1577 bool intersects_coverage (const hb_set_t *glyphs, unsigned int index) const
1579 /* TODO(iter) Rewrite as dagger. */
1580 for (const auto& range : rangeRecord.as_array ())
1582 if (range.value <= index &&
1583 index < (unsigned int) range.value + (range.last - range.first) &&
1584 range.intersects (glyphs))
1586 else if (index < range.value)
1592 void intersected_coverage_glyphs (const hb_set_t *glyphs, hb_set_t *intersect_glyphs) const
1594 for (const auto& range : rangeRecord.as_array ())
1596 if (!range.intersects (glyphs)) continue;
1597 for (hb_codepoint_t g = range.first; g <= range.last; g++)
1598 if (glyphs->has (g)) intersect_glyphs->add (g);
1602 template <typename set_t>
1603 bool collect_coverage (set_t *glyphs) const
1605 unsigned int count = rangeRecord.len;
1606 for (unsigned int i = 0; i < count; i++)
1607 if (unlikely (!rangeRecord[i].collect_coverage (glyphs)))
1613 /* Older compilers need this to be public. */
1616 void init (const CoverageFormat2 &c_)
1621 j = c->rangeRecord.len ? c->rangeRecord[0].first : 0;
1622 if (unlikely (c->rangeRecord[0].first > c->rangeRecord[0].last))
1624 /* Broken table. Skip. */
1625 i = c->rangeRecord.len;
1629 bool more () const { return i < c->rangeRecord.len; }
1632 if (j >= c->rangeRecord[i].last)
1637 unsigned int old = coverage;
1638 j = c->rangeRecord[i].first;
1639 coverage = c->rangeRecord[i].value;
1640 if (unlikely (coverage != old + 1))
1642 /* Broken table. Skip. Important to avoid DoS.
1643 * Also, our callers depend on coverage being
1644 * consecutive and monotonically increasing,
1646 i = c->rangeRecord.len;
1655 hb_codepoint_t get_glyph () const { return j; }
1656 bool operator != (const iter_t& o) const
1657 { return i != o.i || j != o.j || c != o.c; }
1660 const struct CoverageFormat2 *c;
1661 unsigned int i, coverage;
1667 HBUINT16 coverageFormat; /* Format identifier--format = 2 */
1668 SortedArray16Of<RangeRecord>
1669 rangeRecord; /* Array of glyph ranges--ordered by
1670 * Start GlyphID. rangeCount entries
1673 DEFINE_SIZE_ARRAY (4, rangeRecord);
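/* Worked example (illustrative, not part of the library): with ranges
 * {first=10, last=14, value=0} and {first=30, last=31, value=5}, format 2
 * assigns consecutive coverage indices across ranges:
 *
 *   get_coverage (12) == 0 + (12 - 10) == 2
 *   get_coverage (30) == 5 + (30 - 30) == 5
 *   get_coverage (20) == NOT_COVERED      // no range contains it
 */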
1678 /* Has interface. */
1679 static constexpr unsigned SENTINEL = NOT_COVERED;
1680 typedef unsigned int value_t;
1681 value_t operator [] (hb_codepoint_t k) const { return get (k); }
1682 bool has (hb_codepoint_t k) const { return (*this)[k] != SENTINEL; }
1684 bool operator () (hb_codepoint_t k) const { return has (k); }
1686 unsigned int get (hb_codepoint_t k) const { return get_coverage (k); }
1687 unsigned int get_coverage (hb_codepoint_t glyph_id) const
1690 case 1: return u.format1.get_coverage (glyph_id);
1691 case 2: return u.format2.get_coverage (glyph_id);
1692 default:return NOT_COVERED;
1696 template <typename Iterator,
1697 hb_requires (hb_is_sorted_source_of (Iterator, hb_codepoint_t))>
1698 bool serialize (hb_serialize_context_t *c, Iterator glyphs)
1700 TRACE_SERIALIZE (this);
1701 if (unlikely (!c->extend_min (this))) return_trace (false);
1704 unsigned num_ranges = 0;
1705 hb_codepoint_t last = (hb_codepoint_t) -2;
1706 for (auto g: glyphs)
1713 u.format = count <= num_ranges * 3 ? 1 : 2;
1717 case 1: return_trace (u.format1.serialize (c, glyphs));
1718 case 2: return_trace (u.format2.serialize (c, glyphs));
1719 default:return_trace (false);
1723 bool subset (hb_subset_context_t *c) const
1725 TRACE_SUBSET (this);
1726 const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
1727 const hb_map_t &glyph_map = *c->plan->glyph_map;
1731 | hb_filter (glyphset)
1732 | hb_map_retains_sorting (glyph_map)
1735 bool ret = bool (it);
1736 Coverage_serialize (c->serializer, it);
1740 bool sanitize (hb_sanitize_context_t *c) const
1742 TRACE_SANITIZE (this);
1743 if (!u.format.sanitize (c)) return_trace (false);
1746 case 1: return_trace (u.format1.sanitize (c));
1747 case 2: return_trace (u.format2.sanitize (c));
1748 default:return_trace (true);
1752 bool intersects (const hb_set_t *glyphs) const
1756 case 1: return u.format1.intersects (glyphs);
1757 case 2: return u.format2.intersects (glyphs);
1758 default:return false;
1761 bool intersects_coverage (const hb_set_t *glyphs, unsigned int index) const
1765 case 1: return u.format1.intersects_coverage (glyphs, index);
1766 case 2: return u.format2.intersects_coverage (glyphs, index);
1767 default:return false;
1771 /* Might return false if array looks unsorted.
1772 * Used for faster rejection of corrupt data. */
1773 template <typename set_t>
1774 bool collect_coverage (set_t *glyphs) const
1778 case 1: return u.format1.collect_coverage (glyphs);
1779 case 2: return u.format2.collect_coverage (glyphs);
1780 default:return false;
1784 void intersected_coverage_glyphs (const hb_set_t *glyphs, hb_set_t *intersect_glyphs) const
1788 case 1: return u.format1.intersected_coverage_glyphs (glyphs, intersect_glyphs);
1789 case 2: return u.format2.intersected_coverage_glyphs (glyphs, intersect_glyphs);
1794 struct iter_t : hb_iter_with_fallback_t<iter_t, hb_codepoint_t>
1796 static constexpr bool is_sorted_iterator = true;
1797 iter_t (const Coverage &c_ = Null (Coverage))
1799 memset (this, 0, sizeof (*this));
1800 format = c_.u.format;
1803 case 1: u.format1.init (c_.u.format1); return;
1804 case 2: u.format2.init (c_.u.format2); return;
1808 bool __more__ () const
1812 case 1: return u.format1.more ();
1813 case 2: return u.format2.more ();
1814 default:return false;
1821 case 1: u.format1.next (); break;
1822 case 2: u.format2.next (); break;
1826 typedef hb_codepoint_t __item_t__;
1827 __item_t__ __item__ () const { return get_glyph (); }
1829 hb_codepoint_t get_glyph () const
1833 case 1: return u.format1.get_glyph ();
1834 case 2: return u.format2.get_glyph ();
1838 bool operator != (const iter_t& o) const
1840 if (format != o.format) return true;
1843 case 1: return u.format1 != o.u.format1;
1844 case 2: return u.format2 != o.u.format2;
1845 default:return false;
1850 unsigned int format;
1852 CoverageFormat2::iter_t format2; /* Put this one first since it's larger; helps shut up compiler. */
1853 CoverageFormat1::iter_t format1;
1856 iter_t iter () const { return iter_t (*this); }
1860 HBUINT16 format; /* Format identifier */
1861 CoverageFormat1 format1;
1862 CoverageFormat2 format2;
1865 DEFINE_SIZE_UNION (2, format);
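/* Illustrative sketch (not part of the library): thanks to the "has
 * interface" above, a Coverage object can be used directly as a predicate
 * in dagger pipelines, or queried for the coverage index of a glyph:
 *
 *   unsigned idx = coverage.get_coverage (gid);   // NOT_COVERED if absent
 *
 *   + hb_iter (glyphs)
 *   | hb_filter (coverage)   // keeps only glyphs the Coverage contains
 *   ;
 */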
1868 template<typename Iterator>
1870 Coverage_serialize (hb_serialize_context_t *c,
1872 { c->start_embed<Coverage> ()->serialize (c, it); }
1874 static void ClassDef_remap_and_serialize (hb_serialize_context_t *c,
1875 const hb_map_t &gid_klass_map,
1876 hb_sorted_vector_t<HBGlyphID16> &glyphs,
1877 const hb_set_t &klasses,
1878 bool use_class_zero,
1879 hb_map_t *klass_map /*INOUT*/)
1883 ClassDef_serialize (c, hb_zip (glyphs.iter (), + glyphs.iter ()
1884 | hb_map (gid_klass_map)));
1888 /* Any glyph not assigned a class value falls into class zero (0);
1889 * if any glyph is assigned to class 0, the remapping must start with 0->0. */
1890 if (!use_class_zero)
1891 klass_map->set (0, 0);
1893 unsigned idx = klass_map->has (0) ? 1 : 0;
1894 for (const unsigned k: klasses.iter ())
1896 if (klass_map->has (k)) continue;
1897 klass_map->set (k, idx);
1903 | hb_map_retains_sorting ([&] (const HBGlyphID16& gid) -> hb_pair_t<hb_codepoint_t, unsigned>
1905 unsigned new_klass = klass_map->get (gid_klass_map[gid]);
1906 return hb_pair ((hb_codepoint_t)gid, new_klass);
1910 c->propagate_error (glyphs, klasses);
1911 ClassDef_serialize (c, it);
1915 * Class Definition Table
1918 struct ClassDefFormat1
1920 friend struct ClassDef;
1923 unsigned int get_class (hb_codepoint_t glyph_id) const
1925 return classValue[(unsigned int) (glyph_id - startGlyph)];
1928 template<typename Iterator,
1929 hb_requires (hb_is_iterator (Iterator))>
1930 bool serialize (hb_serialize_context_t *c,
1933 TRACE_SERIALIZE (this);
1934 if (unlikely (!c->extend_min (this))) return_trace (false);
1941 return_trace (true);
1944 hb_codepoint_t glyph_min = (*it).first;
1945 hb_codepoint_t glyph_max = + it
1947 | hb_reduce (hb_max, 0u);
1948 unsigned glyph_count = glyph_max - glyph_min + 1;
1950 startGlyph = glyph_min;
1951 if (unlikely (!classValue.serialize (c, glyph_count))) return_trace (false);
1952 for (const hb_pair_t<hb_codepoint_t, unsigned> gid_klass_pair : + it)
1954 unsigned idx = gid_klass_pair.first - glyph_min;
1955 classValue[idx] = gid_klass_pair.second;
1957 return_trace (true);
1960 bool subset (hb_subset_context_t *c,
1961 hb_map_t *klass_map = nullptr /*OUT*/,
1962 bool keep_empty_table = true,
1963 bool use_class_zero = true,
1964 const Coverage* glyph_filter = nullptr) const
1966 TRACE_SUBSET (this);
1967 const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
1968 const hb_map_t &glyph_map = *c->plan->glyph_map;
1970 hb_sorted_vector_t<HBGlyphID16> glyphs;
1971 hb_set_t orig_klasses;
1972 hb_map_t gid_org_klass_map;
1974 hb_codepoint_t start = startGlyph;
1975 hb_codepoint_t end = start + classValue.len;
1977 for (const hb_codepoint_t gid : + hb_range (start, end)
1978 | hb_filter (glyphset))
1980 if (glyph_filter && !glyph_filter->has(gid)) continue;
1982 unsigned klass = classValue[gid - start];
1983 if (!klass) continue;
1985 glyphs.push (glyph_map[gid]);
1986 gid_org_klass_map.set (glyph_map[gid], klass);
1987 orig_klasses.add (klass);
1990 unsigned glyph_count = glyph_filter
1991 ? hb_len (hb_iter (glyphset) | hb_filter (glyph_filter))
1992 : glyphset.get_population ();
1993 use_class_zero = use_class_zero && glyph_count <= gid_org_klass_map.get_population ();
1994 ClassDef_remap_and_serialize (c->serializer, gid_org_klass_map,
1995 glyphs, orig_klasses, use_class_zero, klass_map);
1996 return_trace (keep_empty_table || (bool) glyphs);
1999 bool sanitize (hb_sanitize_context_t *c) const
2001 TRACE_SANITIZE (this);
2002 return_trace (c->check_struct (this) && classValue.sanitize (c));
2005 template <typename set_t>
2006 bool collect_coverage (set_t *glyphs) const
2008 unsigned int start = 0;
2009 unsigned int count = classValue.len;
2010 for (unsigned int i = 0; i < count; i++)
2016 if (unlikely (!glyphs->add_range (startGlyph + start, startGlyph + i)))
2022 if (unlikely (!glyphs->add_range (startGlyph + start, startGlyph + count)))
2028 template <typename set_t>
2029 bool collect_class (set_t *glyphs, unsigned klass) const
2031 unsigned int count = classValue.len;
2032 for (unsigned int i = 0; i < count; i++)
2033 if (classValue[i] == klass) glyphs->add (startGlyph + i);
2037 bool intersects (const hb_set_t *glyphs) const
2039 /* TODO Speed up, using hb_set_next()? */
2040 hb_codepoint_t start = startGlyph;
2041 hb_codepoint_t end = startGlyph + classValue.len;
2042 for (hb_codepoint_t iter = startGlyph - 1;
2043 hb_set_next (glyphs, &iter) && iter < end;)
2044 if (classValue[iter - start]) return true;
2047 bool intersects_class (const hb_set_t *glyphs, uint16_t klass) const
2049 unsigned int count = classValue.len;
2052 /* Match if there's any glyph that is not listed! */
2053 hb_codepoint_t g = HB_SET_VALUE_INVALID;
2054 if (!hb_set_next (glyphs, &g)) return false;
2055 if (g < startGlyph) return true;
2056 g = startGlyph + count - 1;
2057 if (hb_set_next (glyphs, &g)) return true;
2060 /* TODO Speed up, using set overlap first? */
2061 /* TODO(iter) Rewrite as dagger. */
2063 const HBUINT16 *arr = classValue.arrayZ;
2064 for (unsigned int i = 0; i < count; i++)
2065 if (arr[i] == k && glyphs->has (startGlyph + i))
2070 void intersected_class_glyphs (const hb_set_t *glyphs, unsigned klass, hb_set_t *intersect_glyphs) const
2072 unsigned count = classValue.len;
2075 hb_codepoint_t endGlyph = startGlyph + count -1;
2076 for (hb_codepoint_t g : glyphs->iter ())
2077 if (g < startGlyph || g > endGlyph)
2078 intersect_glyphs->add (g);
2083 for (unsigned i = 0; i < count; i++)
2084 if (classValue[i] == klass && glyphs->has (startGlyph + i))
2085 intersect_glyphs->add (startGlyph + i);
2088 void intersected_classes (const hb_set_t *glyphs, hb_set_t *intersect_classes) const
2090 if (glyphs->is_empty ()) return;
2091 hb_codepoint_t end_glyph = startGlyph + classValue.len - 1;
2092 if (glyphs->get_min () < startGlyph ||
2093 glyphs->get_max () > end_glyph)
2094 intersect_classes->add (0);
2096 for (const auto& _ : + hb_enumerate (classValue))
2098 hb_codepoint_t g = startGlyph + _.first;
2099 if (glyphs->has (g))
2100 intersect_classes->add (_.second);
2105 HBUINT16 classFormat; /* Format identifier--format = 1 */
2106 HBGlyphID16 startGlyph; /* First GlyphID of the classValueArray */
2108 classValue; /* Array of Class Values--one per GlyphID */
2110 DEFINE_SIZE_ARRAY (6, classValue);
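/* Worked example (illustrative, not part of the library): with
 * startGlyph = 100 and classValue = [2, 0, 1], format 1 maps
 *
 *   get_class (100) == 2
 *   get_class (102) == 1
 *   get_class (103) == 0   // outside classValue -> class 0
 */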
2113 struct ClassDefFormat2
2115 friend struct ClassDef;
2118 unsigned int get_class (hb_codepoint_t glyph_id) const
2120 return rangeRecord.bsearch (glyph_id).value;
2123 template<typename Iterator,
2124 hb_requires (hb_is_iterator (Iterator))>
2125 bool serialize (hb_serialize_context_t *c,
2128 TRACE_SERIALIZE (this);
2129 if (unlikely (!c->extend_min (this))) return_trace (false);
2134 rangeRecord.len = 0;
2135 return_trace (true);
2138 unsigned num_ranges = 1;
2139 hb_codepoint_t prev_gid = (*it).first;
2140 unsigned prev_klass = (*it).second;
2142 RangeRecord range_rec;
2143 range_rec.first = prev_gid;
2144 range_rec.last = prev_gid;
2145 range_rec.value = prev_klass;
2147 RangeRecord *record = c->copy (range_rec);
2148 if (unlikely (!record)) return_trace (false);
2150 for (const auto gid_klass_pair : + (++it))
2152 hb_codepoint_t cur_gid = gid_klass_pair.first;
2153 unsigned cur_klass = gid_klass_pair.second;
2155 if (cur_gid != prev_gid + 1 ||
2156 cur_klass != prev_klass)
2158 if (unlikely (!record)) break;
2159 record->last = prev_gid;
2162 range_rec.first = cur_gid;
2163 range_rec.last = cur_gid;
2164 range_rec.value = cur_klass;
2166 record = c->copy (range_rec);
2169 prev_klass = cur_klass;
2173 if (likely (record)) record->last = prev_gid;
2174 rangeRecord.len = num_ranges;
2175 return_trace (true);
2178 bool subset (hb_subset_context_t *c,
2179 hb_map_t *klass_map = nullptr /*OUT*/,
2180 bool keep_empty_table = true,
2181 bool use_class_zero = true,
2182 const Coverage* glyph_filter = nullptr) const
2184 TRACE_SUBSET (this);
2185 const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
2186 const hb_map_t &glyph_map = *c->plan->glyph_map;
2188 hb_sorted_vector_t<HBGlyphID16> glyphs;
2189 hb_set_t orig_klasses;
2190 hb_map_t gid_org_klass_map;
2192 unsigned count = rangeRecord.len;
2193 for (unsigned i = 0; i < count; i++)
2195 unsigned klass = rangeRecord[i].value;
2196 if (!klass) continue;
2197 hb_codepoint_t start = rangeRecord[i].first;
2198 hb_codepoint_t end = rangeRecord[i].last + 1;
2199 for (hb_codepoint_t g = start; g < end; g++)
2201 if (!glyphset.has (g)) continue;
2202 if (glyph_filter && !glyph_filter->has (g)) continue;
2203 glyphs.push (glyph_map[g]);
2204 gid_org_klass_map.set (glyph_map[g], klass);
2205 orig_klasses.add (klass);
2209 unsigned glyph_count = glyph_filter
2210 ? hb_len (hb_iter (glyphset) | hb_filter (glyph_filter))
2211 : glyphset.get_population ();
2212 use_class_zero = use_class_zero && glyph_count <= gid_org_klass_map.get_population ();
2213 ClassDef_remap_and_serialize (c->serializer, gid_org_klass_map,
2214 glyphs, orig_klasses, use_class_zero, klass_map);
2215 return_trace (keep_empty_table || (bool) glyphs);
2218 bool sanitize (hb_sanitize_context_t *c) const
2220 TRACE_SANITIZE (this);
2221 return_trace (rangeRecord.sanitize (c));
2224 template <typename set_t>
2225 bool collect_coverage (set_t *glyphs) const
2227 unsigned int count = rangeRecord.len;
2228 for (unsigned int i = 0; i < count; i++)
2229 if (rangeRecord[i].value)
2230 if (unlikely (!rangeRecord[i].collect_coverage (glyphs)))
2235 template <typename set_t>
2236 bool collect_class (set_t *glyphs, unsigned int klass) const
2238 unsigned int count = rangeRecord.len;
2239 for (unsigned int i = 0; i < count; i++)
2241 if (rangeRecord[i].value == klass)
2242 if (unlikely (!rangeRecord[i].collect_coverage (glyphs)))
2248 bool intersects (const hb_set_t *glyphs) const
2250 /* TODO Speed up, using hb_set_next() and bsearch()? */
2251 unsigned int count = rangeRecord.len;
2252 for (unsigned int i = 0; i < count; i++)
2254 const auto& range = rangeRecord[i];
2255 if (range.intersects (glyphs) && range.value)
2260 bool intersects_class (const hb_set_t *glyphs, uint16_t klass) const
2262 unsigned int count = rangeRecord.len;
2265 /* Match if there's any glyph that is not listed! */
2266 hb_codepoint_t g = HB_SET_VALUE_INVALID;
2267 for (unsigned int i = 0; i < count; i++)
2269 if (!hb_set_next (glyphs, &g))
2271 if (g < rangeRecord[i].first)
2273 g = rangeRecord[i].last;
2275 if (g != HB_SET_VALUE_INVALID && hb_set_next (glyphs, &g))
2279 /* TODO Speed up, using set overlap first? */
2280 /* TODO(iter) Rewrite as dagger. */
2282 const RangeRecord *arr = rangeRecord.arrayZ;
2283 for (unsigned int i = 0; i < count; i++)
2284 if (arr[i].value == k && arr[i].intersects (glyphs))
2289 void intersected_class_glyphs (const hb_set_t *glyphs, unsigned klass, hb_set_t *intersect_glyphs) const
2291 unsigned count = rangeRecord.len;
2294 hb_codepoint_t g = HB_SET_VALUE_INVALID;
2295 for (unsigned int i = 0; i < count; i++)
2297 if (!hb_set_next (glyphs, &g))
2299 while (g != HB_SET_VALUE_INVALID && g < rangeRecord[i].first)
2301 intersect_glyphs->add (g);
2302 hb_set_next (glyphs, &g);
2304 g = rangeRecord[i].last;
2306 while (g != HB_SET_VALUE_INVALID && hb_set_next (glyphs, &g))
2307 intersect_glyphs->add (g);
2312 hb_codepoint_t g = HB_SET_VALUE_INVALID;
2313 for (unsigned int i = 0; i < count; i++)
2315 if (rangeRecord[i].value != klass) continue;
2317 if (g != HB_SET_VALUE_INVALID)
2319 if (g >= rangeRecord[i].first &&
2320 g <= rangeRecord[i].last)
2321 intersect_glyphs->add (g);
2322 if (g > rangeRecord[i].last)
2326 g = rangeRecord[i].first - 1;
2327 while (hb_set_next (glyphs, &g))
2329 if (g >= rangeRecord[i].first && g <= rangeRecord[i].last)
2330 intersect_glyphs->add (g);
2331 else if (g > rangeRecord[i].last)
2337 void intersected_classes (const hb_set_t *glyphs, hb_set_t *intersect_classes) const
2339 if (glyphs->is_empty ()) return;
2341 unsigned count = rangeRecord.len;
2342 hb_codepoint_t g = HB_SET_VALUE_INVALID;
2343 for (unsigned int i = 0; i < count; i++)
2345 if (!hb_set_next (glyphs, &g))
2347 if (g < rangeRecord[i].first)
2349 intersect_classes->add (0);
2352 g = rangeRecord[i].last;
2354 if (g != HB_SET_VALUE_INVALID && hb_set_next (glyphs, &g))
2355 intersect_classes->add (0);
2357 for (const RangeRecord& record : rangeRecord.iter ())
2358 if (record.intersects (glyphs))
2359 intersect_classes->add (record.value);
2363 HBUINT16 classFormat; /* Format identifier--format = 2 */
2364 SortedArray16Of<RangeRecord>
2365 rangeRecord; /* Array of glyph ranges--ordered by Start GlyphID */
2368 DEFINE_SIZE_ARRAY (4, rangeRecord);
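/* Illustrative example (made-up values): in format 2, a RangeRecord with
 * first = 20, last = 24, value = 5 assigns class 5 to glyphs 20..24;
 * get_class () of a glyph covered by no range returns the null record's
 * value, i.e. class 0. */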
2373 /* Has interface. */
2374 static constexpr unsigned SENTINEL = 0;
2375 typedef unsigned int value_t;
2376 value_t operator [] (hb_codepoint_t k) const { return get (k); }
2377 bool has (hb_codepoint_t k) const { return (*this)[k] != SENTINEL; }
2379 hb_codepoint_t operator () (hb_codepoint_t k) const { return get (k); }
2381 unsigned int get (hb_codepoint_t k) const { return get_class (k); }
2382 unsigned int get_class (hb_codepoint_t glyph_id) const
2385 case 1: return u.format1.get_class (glyph_id);
2386 case 2: return u.format2.get_class (glyph_id);
2391 template<typename Iterator,
2392 hb_requires (hb_is_iterator (Iterator))>
2393 bool serialize (hb_serialize_context_t *c, Iterator it_with_class_zero)
2395 TRACE_SERIALIZE (this);
2396 if (unlikely (!c->extend_min (this))) return_trace (false);
2398 auto it = + it_with_class_zero | hb_filter (hb_second);
2400 unsigned format = 2;
2403 hb_codepoint_t glyph_min = (*it).first;
2404 hb_codepoint_t glyph_max = glyph_min;
2406 unsigned num_glyphs = 0;
2407 unsigned num_ranges = 1;
2408 hb_codepoint_t prev_gid = glyph_min;
2409 unsigned prev_klass = (*it).second;
2411 for (const auto gid_klass_pair : it)
2413 hb_codepoint_t cur_gid = gid_klass_pair.first;
2414 unsigned cur_klass = gid_klass_pair.second;
2416 if (cur_gid == glyph_min) continue;
2417 if (cur_gid > glyph_max) glyph_max = cur_gid;
2418 if (cur_gid != prev_gid + 1 ||
2419 cur_klass != prev_klass)
2423 prev_klass = cur_klass;
2426 if (num_glyphs && 1 + (glyph_max - glyph_min + 1) <= num_ranges * 3)
2433 case 1: return_trace (u.format1.serialize (c, it));
2434 case 2: return_trace (u.format2.serialize (c, it));
2435 default:return_trace (false);
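/* Size rationale for the format choice above (counting HBUINT16s): format 1
 * costs 3 + (glyph_max - glyph_min + 1) units (format, startGlyph, count, one
 * class per glyph in the span), while format 2 costs 2 + 3 * num_ranges
 * (format, count, first/last/value per range); format 1 is picked exactly when
 * 1 + (glyph_max - glyph_min + 1) <= num_ranges * 3, i.e. when the dense
 * array is no larger than the range list. */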
2439 bool subset (hb_subset_context_t *c,
2440 hb_map_t *klass_map = nullptr /*OUT*/,
2441 bool keep_empty_table = true,
2442 bool use_class_zero = true,
2443 const Coverage* glyph_filter = nullptr) const
2445 TRACE_SUBSET (this);
2447 case 1: return_trace (u.format1.subset (c, klass_map, keep_empty_table, use_class_zero, glyph_filter));
2448 case 2: return_trace (u.format2.subset (c, klass_map, keep_empty_table, use_class_zero, glyph_filter));
2449 default:return_trace (false);
2453 bool sanitize (hb_sanitize_context_t *c) const
2455 TRACE_SANITIZE (this);
2456 if (!u.format.sanitize (c)) return_trace (false);
2458 case 1: return_trace (u.format1.sanitize (c));
2459 case 2: return_trace (u.format2.sanitize (c));
2460 default:return_trace (true);
2464 /* Might return false if array looks unsorted.
2465 * Used for faster rejection of corrupt data. */
2466 template <typename set_t>
2467 bool collect_coverage (set_t *glyphs) const
2470 case 1: return u.format1.collect_coverage (glyphs);
2471 case 2: return u.format2.collect_coverage (glyphs);
2472 default:return false;
2476 /* Might return false if array looks unsorted.
2477 * Used for faster rejection of corrupt data. */
2478 template <typename set_t>
2479 bool collect_class (set_t *glyphs, unsigned int klass) const
2482 case 1: return u.format1.collect_class (glyphs, klass);
2483 case 2: return u.format2.collect_class (glyphs, klass);
2484 default:return false;
2488 bool intersects (const hb_set_t *glyphs) const
2491 case 1: return u.format1.intersects (glyphs);
2492 case 2: return u.format2.intersects (glyphs);
2493 default:return false;
2496 bool intersects_class (const hb_set_t *glyphs, unsigned int klass) const
2499 case 1: return u.format1.intersects_class (glyphs, klass);
2500 case 2: return u.format2.intersects_class (glyphs, klass);
2501 default:return false;
2505 void intersected_class_glyphs (const hb_set_t *glyphs, unsigned klass, hb_set_t *intersect_glyphs) const
2508 case 1: return u.format1.intersected_class_glyphs (glyphs, klass, intersect_glyphs);
2509 case 2: return u.format2.intersected_class_glyphs (glyphs, klass, intersect_glyphs);
2514 void intersected_classes (const hb_set_t *glyphs, hb_set_t *intersect_classes) const
2517 case 1: return u.format1.intersected_classes (glyphs, intersect_classes);
2518 case 2: return u.format2.intersected_classes (glyphs, intersect_classes);
2526 HBUINT16 format; /* Format identifier */
2527 ClassDefFormat1 format1;
2528 ClassDefFormat2 format2;
2531 DEFINE_SIZE_UNION (2, format);
2534 template<typename Iterator>
2535 static inline void ClassDef_serialize (hb_serialize_context_t *c,
2537 { c->start_embed<ClassDef> ()->serialize (c, it); }
2541 * Item Variation Store
2544 struct VarRegionAxis
2546 float evaluate (int coord) const
2548 int start = startCoord, peak = peakCoord, end = endCoord;
2550 /* TODO Move these to sanitize(). */
2551 if (unlikely (start > peak || peak > end))
2553 if (unlikely (start < 0 && end > 0 && peak != 0))
2556 if (peak == 0 || coord == peak)
2559 if (coord <= start || end <= coord)
2564 return float (coord - start) / (peak - start);
2566 return float (end - coord) / (end - peak);
2569 bool sanitize (hb_sanitize_context_t *c) const
2571 TRACE_SANITIZE (this);
2572 return_trace (c->check_struct (this));
2573 /* TODO Handle invalid start/peak/end configs, so we don't
2574 * have to do that at runtime. */
2582 DEFINE_SIZE_STATIC (6);
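/* A minimal sketch of the scalar evaluate () computes, restated on plain
 * floats for clarity (the stored coordinates are F2DOT14; invalid configs --
 * start > peak, peak > end, or a range straddling 0 with a nonzero peak --
 * are treated as "no effect" and yield 1):
 *
 *   scalar = 1                                 if peak == 0 or coord == peak
 *          = 0                                 if coord <= start or coord >= end
 *          = (coord - start) / (peak - start)  if start < coord < peak
 *          = (end - coord) / (end - peak)      if peak < coord < end
 *
 * e.g. (start, peak, end) = (0, 0.5, 1.0) and coord = 0.25 gives 0.5. */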
2585 struct VarRegionList
2587 float evaluate (unsigned int region_index,
2588 const int *coords, unsigned int coord_len) const
2590 if (unlikely (region_index >= regionCount))
2593 const VarRegionAxis *axes = axesZ.arrayZ + (region_index * axisCount);
2596 unsigned int count = axisCount;
2597 for (unsigned int i = 0; i < count; i++)
2599 int coord = i < coord_len ? coords[i] : 0;
2600 float factor = axes[i].evaluate (coord);
2608 bool sanitize (hb_sanitize_context_t *c) const
2610 TRACE_SANITIZE (this);
2611 return_trace (c->check_struct (this) && axesZ.sanitize (c, axisCount * regionCount));
2614 bool serialize (hb_serialize_context_t *c, const VarRegionList *src, const hb_bimap_t &region_map)
2616 TRACE_SERIALIZE (this);
2617 if (unlikely (!c->extend_min (this))) return_trace (false);
2618 axisCount = src->axisCount;
2619 regionCount = region_map.get_population ();
2620 if (unlikely (hb_unsigned_mul_overflows (axisCount * regionCount,
2621 VarRegionAxis::static_size))) return_trace (false);
2622 if (unlikely (!c->extend (this))) return_trace (false);
2623 unsigned int region_count = src->regionCount;
2624 for (unsigned int r = 0; r < regionCount; r++)
2626 unsigned int backward = region_map.backward (r);
2627 if (backward >= region_count) return_trace (false);
2628 memcpy (&axesZ[axisCount * r], &src->axesZ[axisCount * backward], VarRegionAxis::static_size * axisCount);
2631 return_trace (true);
2634 unsigned int get_size () const { return min_size + VarRegionAxis::static_size * axisCount * regionCount; }
2638 HBUINT15 regionCount;
2640 UnsizedArrayOf<VarRegionAxis>
2643 DEFINE_SIZE_ARRAY (4, axesZ);
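/* evaluate () above multiplies the per-axis scalars of one region across all
 * axisCount axes (missing coords are treated as 0); if any axis evaluates to
 * 0 the whole region scalar is 0, so that region's deltas drop out for the
 * current design coordinates. */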
2648 unsigned int get_region_index_count () const
2649 { return regionIndices.len; }
2651 unsigned int get_row_size () const
2652 { return shortCount + regionIndices.len; }
2654 unsigned int get_size () const
2656 - regionIndices.min_size + regionIndices.get_size ()
2657 + itemCount * get_row_size ();
2660 float get_delta (unsigned int inner,
2661 const int *coords, unsigned int coord_count,
2662 const VarRegionList &regions) const
2664 if (unlikely (inner >= itemCount))
2667 unsigned int count = regionIndices.len;
2668 unsigned int scount = shortCount;
2670 const HBUINT8 *bytes = get_delta_bytes ();
2671 const HBUINT8 *row = bytes + inner * (scount + count);
2676 const HBINT16 *scursor = reinterpret_cast<const HBINT16 *> (row);
2677 for (; i < scount; i++)
2679 float scalar = regions.evaluate (regionIndices.arrayZ[i], coords, coord_count);
2680 delta += scalar * *scursor++;
2682 const HBINT8 *bcursor = reinterpret_cast<const HBINT8 *> (scursor);
2683 for (; i < count; i++)
2685 float scalar = regions.evaluate (regionIndices.arrayZ[i], coords, coord_count);
2686 delta += scalar * *bcursor++;
2692 void get_region_scalars (const int *coords, unsigned int coord_count,
2693 const VarRegionList &regions,
2694 float *scalars /*OUT*/,
2695 unsigned int num_scalars) const
2697 unsigned count = hb_min (num_scalars, regionIndices.len);
2698 for (unsigned int i = 0; i < count; i++)
2699 scalars[i] = regions.evaluate (regionIndices.arrayZ[i], coords, coord_count);
2700 for (unsigned int i = count; i < num_scalars; i++)
2704 bool sanitize (hb_sanitize_context_t *c) const
2706 TRACE_SANITIZE (this);
2707 return_trace (c->check_struct (this) &&
2708 regionIndices.sanitize (c) &&
2709 shortCount <= regionIndices.len &&
2710 c->check_range (get_delta_bytes (),
2715 bool serialize (hb_serialize_context_t *c,
2717 const hb_inc_bimap_t &inner_map,
2718 const hb_bimap_t &region_map)
2720 TRACE_SERIALIZE (this);
2721 if (unlikely (!c->extend_min (this))) return_trace (false);
2722 itemCount = inner_map.get_next_value ();
2724 /* Optimize short count */
2725 unsigned short ri_count = src->regionIndices.len;
2726 enum delta_size_t { kZero=0, kByte, kShort };
2727 hb_vector_t<delta_size_t> delta_sz;
2728 hb_vector_t<unsigned int> ri_map; /* maps old index to new index */
2729 delta_sz.resize (ri_count);
2730 ri_map.resize (ri_count);
2731 unsigned int new_short_count = 0;
2733 for (r = 0; r < ri_count; r++)
2735 delta_sz[r] = kZero;
2736 for (unsigned int i = 0; i < inner_map.get_next_value (); i++)
2738 unsigned int old = inner_map.backward (i);
2739 int16_t delta = src->get_item_delta (old, r);
2740 if (delta < -128 || 127 < delta)
2742 delta_sz[r] = kShort;
2746 else if (delta != 0)
2747 delta_sz[r] = kByte;
2750 unsigned int short_index = 0;
2751 unsigned int byte_index = new_short_count;
2752 unsigned int new_ri_count = 0;
2753 for (r = 0; r < ri_count; r++)
2756 ri_map[r] = (delta_sz[r] == kShort)? short_index++ : byte_index++;
2760 shortCount = new_short_count;
2761 regionIndices.len = new_ri_count;
2763 if (unlikely (!c->extend (this))) return_trace (false);
2765 for (r = 0; r < ri_count; r++)
2766 if (delta_sz[r]) regionIndices[ri_map[r]] = region_map[src->regionIndices[r]];
2768 for (unsigned int i = 0; i < itemCount; i++)
2770 unsigned int old = inner_map.backward (i);
2771 for (unsigned int r = 0; r < ri_count; r++)
2772 if (delta_sz[r]) set_item_delta (i, ri_map[r], src->get_item_delta (old, r));
2775 return_trace (true);
2778 void collect_region_refs (hb_set_t &region_indices, const hb_inc_bimap_t &inner_map) const
2780 for (unsigned int r = 0; r < regionIndices.len; r++)
2782 unsigned int region = regionIndices[r];
2783 if (region_indices.has (region)) continue;
2784 for (unsigned int i = 0; i < inner_map.get_next_value (); i++)
2785 if (get_item_delta (inner_map.backward (i), r) != 0)
2787 region_indices.add (region);
2794 const HBUINT8 *get_delta_bytes () const
2795 { return &StructAfter<HBUINT8> (regionIndices); }
2797 HBUINT8 *get_delta_bytes ()
2798 { return &StructAfter<HBUINT8> (regionIndices); }
2800 int16_t get_item_delta (unsigned int item, unsigned int region) const
2802 if (item >= itemCount || unlikely (region >= regionIndices.len)) return 0;
2803 const HBINT8 *p = (const HBINT8 *)get_delta_bytes () + item * get_row_size ();
2804 if (region < shortCount)
2805 return ((const HBINT16 *)p)[region];
2807 return (p + HBINT16::static_size * shortCount)[region - shortCount];
2810 void set_item_delta (unsigned int item, unsigned int region, int16_t delta)
2812 HBINT8 *p = (HBINT8 *)get_delta_bytes () + item * get_row_size ();
2813 if (region < shortCount)
2814 ((HBINT16 *)p)[region] = delta;
2816 (p + HBINT16::static_size * shortCount)[region - shortCount] = delta;
2821 HBUINT16 shortCount;
2822 Array16Of<HBUINT16> regionIndices;
2823 /*UnsizedArrayOf<HBUINT8>bytesX;*/
2825 DEFINE_SIZE_ARRAY (6, regionIndices);
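/* Worked example (made-up numbers): each of the itemCount rows stores one
 * delta per entry of regionIndices, the first shortCount as HBINT16 and the
 * rest as HBINT8, so get_row_size () = shortCount + regionIndices.len bytes.
 * With regionIndices.len = 3 and shortCount = 1, a row is a 16-bit delta d0
 * followed by two 8-bit deltas d1, d2, and get_delta () returns
 * scalar (r0) * d0 + scalar (r1) * d1 + scalar (r2) * d2, the scalars coming
 * from VarRegionList::evaluate (). */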
2828 struct VariationStore
2831 float get_delta (unsigned int outer, unsigned int inner,
2832 const int *coords, unsigned int coord_count) const
2838 if (unlikely (outer >= dataSets.len))
2841 return (this+dataSets[outer]).get_delta (inner,
2842 coords, coord_count,
2847 float get_delta (unsigned int index,
2848 const int *coords, unsigned int coord_count) const
2850 unsigned int outer = index >> 16;
2851 unsigned int inner = index & 0xFFFF;
2852 return get_delta (outer, inner, coords, coord_count);
2855 bool sanitize (hb_sanitize_context_t *c) const
2861 TRACE_SANITIZE (this);
2862 return_trace (c->check_struct (this) &&
2864 regions.sanitize (c, this) &&
2865 dataSets.sanitize (c, this));
2868 bool serialize (hb_serialize_context_t *c,
2869 const VariationStore *src,
2870 const hb_array_t <hb_inc_bimap_t> &inner_maps)
2872 TRACE_SERIALIZE (this);
2873 if (unlikely (!c->extend_min (this))) return_trace (false);
2875 unsigned int set_count = 0;
2876 for (unsigned int i = 0; i < inner_maps.length; i++)
2877 if (inner_maps[i].get_population ())
2882 const auto &src_regions = src+src->regions;
2884 hb_set_t region_indices;
2885 for (unsigned int i = 0; i < inner_maps.length; i++)
2886 (src+src->dataSets[i]).collect_region_refs (region_indices, inner_maps[i]);
2888 if (region_indices.in_error ())
2889 return_trace (false);
2891 region_indices.del_range ((src_regions).regionCount, hb_set_t::INVALID);
2893 /* TODO use constructor when our data structures support that. */
2894 hb_inc_bimap_t region_map;
2895 + hb_iter (region_indices)
2896 | hb_apply ([&region_map] (unsigned _) { region_map.add (_); })
2898 if (region_map.in_error())
2899 return_trace (false);
2901 if (unlikely (!regions.serialize_serialize (c, &src_regions, region_map)))
2902 return_trace (false);
2904 dataSets.len = set_count;
2905 if (unlikely (!c->extend (dataSets))) return_trace (false);
2907 /* TODO: The following code could be simplified when
2908 * List16OfOffset16To::subset () can take a custom param to be passed to VarData::serialize () */
2909 unsigned int set_index = 0;
2910 for (unsigned int i = 0; i < inner_maps.length; i++)
2912 if (!inner_maps[i].get_population ()) continue;
2913 if (unlikely (!dataSets[set_index++]
2914 .serialize_serialize (c, &(src+src->dataSets[i]), inner_maps[i], region_map)))
2915 return_trace (false);
2918 return_trace (true);
2921 bool subset (hb_subset_context_t *c) const
2923 TRACE_SUBSET (this);
2925 VariationStore *varstore_prime = c->serializer->start_embed<VariationStore> ();
2926 if (unlikely (!varstore_prime)) return_trace (false);
2928 const hb_set_t *variation_indices = c->plan->layout_variation_indices;
2929 if (variation_indices->is_empty ()) return_trace (false);
2931 hb_vector_t<hb_inc_bimap_t> inner_maps;
2932 inner_maps.resize ((unsigned) dataSets.len);
2934 for (unsigned idx : c->plan->layout_variation_indices->iter ())
2936 uint16_t major = idx >> 16;
2937 uint16_t minor = idx & 0xFFFF;
2939 if (major >= inner_maps.length)
2940 return_trace (false);
2941 inner_maps[major].add (minor);
2943 varstore_prime->serialize (c->serializer, this, inner_maps.as_array ());
2946 !c->serializer->in_error()
2947 && varstore_prime->dataSets);
2950 unsigned int get_region_index_count (unsigned int major) const
2951 { return (this+dataSets[major]).get_region_index_count (); }
2953 void get_region_scalars (unsigned int major,
2954 const int *coords, unsigned int coord_count,
2955 float *scalars /*OUT*/,
2956 unsigned int num_scalars) const
2959 for (unsigned i = 0; i < num_scalars; i++)
2964 (this+dataSets[major]).get_region_scalars (coords, coord_count,
2966 &scalars[0], num_scalars);
2969 unsigned int get_sub_table_count () const { return dataSets.len; }
2973 Offset32To<VarRegionList> regions;
2974 Array16OfOffset32To<VarData> dataSets;
2976 DEFINE_SIZE_ARRAY_SIZED (8, dataSets);
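/* The single-index get_delta () overload above unpacks a 32-bit variation
 * index as (outer << 16) | inner: outer selects the VarData subtable in
 * dataSets, inner the row within it.  E.g. (made-up value) index 0x00020005
 * addresses row 5 of dataSets[2]. */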
2980 * Feature Variations
2983 struct ConditionFormat1
2985 friend struct Condition;
2987 bool subset (hb_subset_context_t *c) const
2989 TRACE_SUBSET (this);
2990 auto *out = c->serializer->embed (this);
2991 if (unlikely (!out)) return_trace (false);
2992 return_trace (true);
2996 bool evaluate (const int *coords, unsigned int coord_len) const
2998 int coord = axisIndex < coord_len ? coords[axisIndex] : 0;
2999 return filterRangeMinValue <= coord && coord <= filterRangeMaxValue;
3002 bool sanitize (hb_sanitize_context_t *c) const
3004 TRACE_SANITIZE (this);
3005 return_trace (c->check_struct (this));
3009 HBUINT16 format; /* Format identifier--format = 1 */
3011 F2DOT14 filterRangeMinValue;
3012 F2DOT14 filterRangeMaxValue;
3014 DEFINE_SIZE_STATIC (8);
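/* Illustrative example (made-up values): with axisIndex naming the weight
 * axis and filterRangeMinValue/filterRangeMaxValue = 0.25/1.0 in normalized
 * F2DOT14 coordinates, evaluate () is true for a normalized wght coord of
 * 0.5 and false for -0.2; an axis index beyond coord_len is evaluated at the
 * default position 0. */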
3019 bool evaluate (const int *coords, unsigned int coord_len) const
3022 case 1: return u.format1.evaluate (coords, coord_len);
3023 default:return false;
3027 template <typename context_t, typename ...Ts>
3028 typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
3030 TRACE_DISPATCH (this, u.format);
3031 if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
3033 case 1: return_trace (c->dispatch (u.format1, std::forward<Ts> (ds)...));
3034 default:return_trace (c->default_return_value ());
3038 bool sanitize (hb_sanitize_context_t *c) const
3040 TRACE_SANITIZE (this);
3041 if (!u.format.sanitize (c)) return_trace (false);
3043 case 1: return_trace (u.format1.sanitize (c));
3044 default:return_trace (true);
3050 HBUINT16 format; /* Format identifier */
3051 ConditionFormat1 format1;
3054 DEFINE_SIZE_UNION (2, format);
3059 bool evaluate (const int *coords, unsigned int coord_len) const
3061 unsigned int count = conditions.len;
3062 for (unsigned int i = 0; i < count; i++)
3063 if (!(this+conditions.arrayZ[i]).evaluate (coords, coord_len))
3068 bool subset (hb_subset_context_t *c) const
3070 TRACE_SUBSET (this);
3071 auto *out = c->serializer->start_embed (this);
3072 if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);
3074 + conditions.iter ()
3075 | hb_apply (subset_offset_array (c, out->conditions, this))
3078 return_trace (bool (out->conditions));
3081 bool sanitize (hb_sanitize_context_t *c) const
3083 TRACE_SANITIZE (this);
3084 return_trace (conditions.sanitize (c, this));
3088 Array16OfOffset32To<Condition> conditions;
3090 DEFINE_SIZE_ARRAY (2, conditions);
3093 struct FeatureTableSubstitutionRecord
3095 friend struct FeatureTableSubstitution;
3097 void collect_lookups (const void *base, hb_set_t *lookup_indexes /* OUT */) const
3099 return (base+feature).add_lookup_indexes_to (lookup_indexes);
3102 void closure_features (const void *base,
3103 const hb_map_t *lookup_indexes,
3104 hb_set_t *feature_indexes /* OUT */) const
3106 if ((base+feature).intersects_lookup_indexes (lookup_indexes))
3107 feature_indexes->add (featureIndex);
3110 bool subset (hb_subset_layout_context_t *c, const void *base) const
3112 TRACE_SUBSET (this);
3113 if (!c->feature_index_map->has (featureIndex)) {
3113 // Feature that is being substituted is not being retained, so we don't need to keep this substitution record.
3116 return_trace (false);
3119 auto *out = c->subset_context->serializer->embed (this);
3120 if (unlikely (!out)) return_trace (false);
3122 out->featureIndex = c->feature_index_map->get (featureIndex);
3123 bool ret = out->feature.serialize_subset (c->subset_context, feature, base, c);
3127 bool sanitize (hb_sanitize_context_t *c, const void *base) const
3129 TRACE_SANITIZE (this);
3130 return_trace (c->check_struct (this) && feature.sanitize (c, base));
3134 HBUINT16 featureIndex;
3135 Offset32To<Feature> feature;
3137 DEFINE_SIZE_STATIC (6);
3140 struct FeatureTableSubstitution
3142 const Feature *find_substitute (unsigned int feature_index) const
3144 unsigned int count = substitutions.len;
3145 for (unsigned int i = 0; i < count; i++)
3147 const FeatureTableSubstitutionRecord &record = substitutions.arrayZ[i];
3148 if (record.featureIndex == feature_index)
3149 return &(this+record.feature);
3154 void collect_lookups (const hb_set_t *feature_indexes,
3155 hb_set_t *lookup_indexes /* OUT */) const
3157 + hb_iter (substitutions)
3158 | hb_filter (feature_indexes, &FeatureTableSubstitutionRecord::featureIndex)
3159 | hb_apply ([this, lookup_indexes] (const FeatureTableSubstitutionRecord& r)
3160 { r.collect_lookups (this, lookup_indexes); })
3164 void closure_features (const hb_map_t *lookup_indexes,
3165 hb_set_t *feature_indexes /* OUT */) const
3167 for (const FeatureTableSubstitutionRecord& record : substitutions)
3168 record.closure_features (this, lookup_indexes, feature_indexes);
3171 bool intersects_features (const hb_map_t *feature_index_map) const
3173 for (const FeatureTableSubstitutionRecord& record : substitutions)
3175 if (feature_index_map->has (record.featureIndex)) return true;
3180 bool subset (hb_subset_context_t *c,
3181 hb_subset_layout_context_t *l) const
3183 TRACE_SUBSET (this);
3184 auto *out = c->serializer->start_embed (*this);
3185 if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);
3187 out->version.major = version.major;
3188 out->version.minor = version.minor;
3190 + substitutions.iter ()
3191 | hb_apply (subset_record_array (l, &(out->substitutions), this))
3194 return_trace (bool (out->substitutions));
3197 bool sanitize (hb_sanitize_context_t *c) const
3199 TRACE_SANITIZE (this);
3200 return_trace (version.sanitize (c) &&
3201 likely (version.major == 1) &&
3202 substitutions.sanitize (c, this));
3206 FixedVersion<> version; /* Version--0x00010000u */
3207 Array16Of<FeatureTableSubstitutionRecord>
3210 DEFINE_SIZE_ARRAY (6, substitutions);
3213 struct FeatureVariationRecord
3215 friend struct FeatureVariations;
3217 void collect_lookups (const void *base,
3218 const hb_set_t *feature_indexes,
3219 hb_set_t *lookup_indexes /* OUT */) const
3221 return (base+substitutions).collect_lookups (feature_indexes, lookup_indexes);
3224 void closure_features (const void *base,
3225 const hb_map_t *lookup_indexes,
3226 hb_set_t *feature_indexes /* OUT */) const
3228 (base+substitutions).closure_features (lookup_indexes, feature_indexes);
3231 bool intersects_features (const void *base, const hb_map_t *feature_index_map) const
3233 return (base+substitutions).intersects_features (feature_index_map);
3236 bool subset (hb_subset_layout_context_t *c, const void *base) const
3238 TRACE_SUBSET (this);
3239 auto *out = c->subset_context->serializer->embed (this);
3240 if (unlikely (!out)) return_trace (false);
3242 out->conditions.serialize_subset (c->subset_context, conditions, base);
3243 out->substitutions.serialize_subset (c->subset_context, substitutions, base, c);
3245 return_trace (true);
3248 bool sanitize (hb_sanitize_context_t *c, const void *base) const
3250 TRACE_SANITIZE (this);
3251 return_trace (conditions.sanitize (c, base) &&
3252 substitutions.sanitize (c, base));
3256 Offset32To<ConditionSet>
3258 Offset32To<FeatureTableSubstitution>
3261 DEFINE_SIZE_STATIC (8);
3264 struct FeatureVariations
3266 static constexpr unsigned NOT_FOUND_INDEX = 0xFFFFFFFFu;
3268 bool find_index (const int *coords, unsigned int coord_len,
3269 unsigned int *index) const
3271 unsigned int count = varRecords.len;
3272 for (unsigned int i = 0; i < count; i++)
3274 const FeatureVariationRecord &record = varRecords.arrayZ[i];
3275 if ((this+record.conditions).evaluate (coords, coord_len))
3281 *index = NOT_FOUND_INDEX;
3285 const Feature *find_substitute (unsigned int variations_index,
3286 unsigned int feature_index) const
3288 const FeatureVariationRecord &record = varRecords[variations_index];
3289 return (this+record.substitutions).find_substitute (feature_index);
3292 FeatureVariations* copy (hb_serialize_context_t *c) const
3294 TRACE_SERIALIZE (this);
3295 return_trace (c->embed (*this));
3298 void collect_lookups (const hb_set_t *feature_indexes,
3299 hb_set_t *lookup_indexes /* OUT */) const
3301 for (const FeatureVariationRecord& r : varRecords)
3302 r.collect_lookups (this, feature_indexes, lookup_indexes);
3305 void closure_features (const hb_map_t *lookup_indexes,
3306 hb_set_t *feature_indexes /* OUT */) const
3308 for (const FeatureVariationRecord& record : varRecords)
3309 record.closure_features (this, lookup_indexes, feature_indexes);
3312 bool subset (hb_subset_context_t *c,
3313 hb_subset_layout_context_t *l) const
3315 TRACE_SUBSET (this);
3316 auto *out = c->serializer->start_embed (*this);
3317 if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);
3319 out->version.major = version.major;
3320 out->version.minor = version.minor;
3322 int keep_up_to = -1;
3323 for (int i = varRecords.len - 1; i >= 0; i--) {
3324 if (varRecords[i].intersects_features (this, l->feature_index_map)) {
3330 unsigned count = (unsigned) (keep_up_to + 1);
3331 for (unsigned i = 0; i < count; i++) {
3332 subset_record_array (l, &(out->varRecords), this) (varRecords[i]);
3334 return_trace (bool (out->varRecords));
3337 bool sanitize (hb_sanitize_context_t *c) const
3339 TRACE_SANITIZE (this);
3340 return_trace (version.sanitize (c) &&
3341 likely (version.major == 1) &&
3342 varRecords.sanitize (c, this));
3346 FixedVersion<> version; /* Version--0x00010000u */
3347 Array32Of<FeatureVariationRecord>
3350 DEFINE_SIZE_ARRAY_SIZED (8, varRecords);
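/* Typical lookup flow through the accessors above: find_index () scans
 * varRecords in table order and returns the first record whose ConditionSet
 * evaluates true for the current normalized coordinates (else NOT_FOUND_INDEX);
 * find_substitute () then maps a feature index through that record's
 * FeatureTableSubstitution to the replacement Feature, if any. */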
3358 struct HintingDevice
3360 friend struct Device;
3364 hb_position_t get_x_delta (hb_font_t *font) const
3365 { return get_delta (font->x_ppem, font->x_scale); }
3367 hb_position_t get_y_delta (hb_font_t *font) const
3368 { return get_delta (font->y_ppem, font->y_scale); }
3372 unsigned int get_size () const
3374 unsigned int f = deltaFormat;
3375 if (unlikely (f < 1 || f > 3 || startSize > endSize)) return 3 * HBUINT16::static_size;
3376 return HBUINT16::static_size * (4 + ((endSize - startSize) >> (4 - f)));
3379 bool sanitize (hb_sanitize_context_t *c) const
3381 TRACE_SANITIZE (this);
3382 return_trace (c->check_struct (this) && c->check_range (this, this->get_size ()));
3385 HintingDevice* copy (hb_serialize_context_t *c) const
3387 TRACE_SERIALIZE (this);
3388 return_trace (c->embed<HintingDevice> (this));
3393 int get_delta (unsigned int ppem, int scale) const
3395 if (!ppem) return 0;
3397 int pixels = get_delta_pixels (ppem);
3399 if (!pixels) return 0;
3401 return (int) (pixels * (int64_t) scale / ppem);
3403 int get_delta_pixels (unsigned int ppem_size) const
3405 unsigned int f = deltaFormat;
3406 if (unlikely (f < 1 || f > 3))
3409 if (ppem_size < startSize || ppem_size > endSize)
3412 unsigned int s = ppem_size - startSize;
3414 unsigned int byte = deltaValueZ[s >> (4 - f)];
3415 unsigned int bits = (byte >> (16 - (((s & ((1 << (4 - f)) - 1)) + 1) << f)));
3416 unsigned int mask = (0xFFFFu >> (16 - (1 << f)));
3418 int delta = bits & mask;
3420 if ((unsigned int) delta >= ((mask + 1) >> 1))
3427 HBUINT16 startSize; /* Smallest size to correct--in ppem */
3428 HBUINT16 endSize; /* Largest size to correct--in ppem */
3429 HBUINT16 deltaFormat; /* Format of DeltaValue array data: 1, 2, or 3
3430 * 1 Signed 2-bit value, 8 values per uint16
3431 * 2 Signed 4-bit value, 4 values per uint16
3432 * 3 Signed 8-bit value, 2 values per uint16
3434 UnsizedArrayOf<HBUINT16>
3435 deltaValueZ; /* Array of compressed data */
3437 DEFINE_SIZE_ARRAY (6, deltaValueZ);
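/* Worked example of the packing described at deltaFormat (made-up numbers):
 * with deltaFormat = 2 (signed 4-bit values, four per uint16), startSize = 9
 * and ppem = 12, s = 3, so the value lives in deltaValueZ[s >> 2] == [0], in
 * its lowest nibble; a stored nibble of 0xE decodes, after the sign-extension
 * step against (mask + 1) >> 1, to -2 pixels, which get_delta () then scales
 * by scale / ppem into font units. */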
3440 struct VariationDevice
3442 friend struct Device;
3446 hb_position_t get_x_delta (hb_font_t *font, const VariationStore &store) const
3447 { return font->em_scalef_x (get_delta (font, store)); }
3449 hb_position_t get_y_delta (hb_font_t *font, const VariationStore &store) const
3450 { return font->em_scalef_y (get_delta (font, store)); }
3452 VariationDevice* copy (hb_serialize_context_t *c, const hb_map_t *layout_variation_idx_map) const
3454 TRACE_SERIALIZE (this);
3455 auto snap = c->snapshot ();
3456 auto *out = c->embed (this);
3457 if (unlikely (!out)) return_trace (nullptr);
3458 if (!layout_variation_idx_map || layout_variation_idx_map->is_empty ()) return_trace (out);
3460 /* TODO Just get() and bail if NO_VARIATION. Needs to set up the map to return that. */
3461 if (!layout_variation_idx_map->has (varIdx))
3464 return_trace (nullptr);
3466 unsigned new_idx = layout_variation_idx_map->get (varIdx);
3467 out->varIdx = new_idx;
3471 void record_variation_index (hb_set_t *layout_variation_indices) const
3473 layout_variation_indices->add (varIdx);
3476 bool sanitize (hb_sanitize_context_t *c) const
3478 TRACE_SANITIZE (this);
3479 return_trace (c->check_struct (this));
3484 float get_delta (hb_font_t *font, const VariationStore &store) const
3486 return store.get_delta (varIdx, font->coords, font->num_coords);
3491 HBUINT16 deltaFormat; /* Format identifier for this table: 0x8000 */
3493 DEFINE_SIZE_STATIC (6);
3502 HBUINT16 format; /* Format identifier */
3504 DEFINE_SIZE_STATIC (6);
3509 hb_position_t get_x_delta (hb_font_t *font, const VariationStore &store=Null (VariationStore)) const
3513 #ifndef HB_NO_HINTING
3514 case 1: case 2: case 3:
3515 return u.hinting.get_x_delta (font);
3519 return u.variation.get_x_delta (font, store);
3525 hb_position_t get_y_delta (hb_font_t *font, const VariationStore &store=Null (VariationStore)) const
3529 case 1: case 2: case 3:
3530 #ifndef HB_NO_HINTING
3531 return u.hinting.get_y_delta (font);
3535 return u.variation.get_y_delta (font, store);
3542 bool sanitize (hb_sanitize_context_t *c) const
3544 TRACE_SANITIZE (this);
3545 if (!u.b.format.sanitize (c)) return_trace (false);
3546 switch (u.b.format) {
3547 #ifndef HB_NO_HINTING
3548 case 1: case 2: case 3:
3549 return_trace (u.hinting.sanitize (c));
3553 return_trace (u.variation.sanitize (c));
3556 return_trace (true);
3560 Device* copy (hb_serialize_context_t *c, const hb_map_t *layout_variation_idx_map=nullptr) const
3562 TRACE_SERIALIZE (this);
3563 switch (u.b.format) {
3564 #ifndef HB_NO_HINTING
3568 return_trace (reinterpret_cast<Device *> (u.hinting.copy (c)));
3572 return_trace (reinterpret_cast<Device *> (u.variation.copy (c, layout_variation_idx_map)));
3575 return_trace (nullptr);
3579 void collect_variation_indices (hb_set_t *layout_variation_indices) const
3581 switch (u.b.format) {
3582 #ifndef HB_NO_HINTING
3590 u.variation.record_variation_index (layout_variation_indices);
3601 HintingDevice hinting;
3603 VariationDevice variation;
3607 DEFINE_SIZE_UNION (6, b);
3611 } /* namespace OT */
3614 #endif /* HB_OT_LAYOUT_COMMON_HH */