2 * Copyright © 2017 Google, Inc.
4 * This is part of HarfBuzz, a text shaping library.
6 * Permission is hereby granted, without written agreement and without
7 * license or royalty fees, to use, copy, modify, and distribute this
8 * software and its documentation for any purpose, provided that the
9 * above copyright notice and the following two paragraphs appear in
10 * all copies of this software.
12 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
13 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
14 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
15 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
18 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
19 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
20 * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
21 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
22 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
24 * Google Author(s): Behdad Esfahbod
27 #ifndef HB_OT_VAR_HVAR_TABLE_HH
28 #define HB_OT_VAR_HVAR_TABLE_HH
30 #include "hb-ot-layout-common.hh"
36 struct DeltaSetIndexMap
/* NOTE(review): the numeric prefixes in this view jump (e.g. 42 -> 48), so
 * interior lines of this struct are missing here; comments describe only
 * the visible code.  DeltaSetIndexMap maps an index (e.g. a glyph ID) to a
 * packed 16.16 outer.inner delta-set index, per the OpenType HVAR/VVAR
 * delta-set index map format. */
/* Validate the fixed-size header and then the trailing unsized map data. */
38 bool sanitize (hb_sanitize_context_t *c) const
40 TRACE_SANITIZE (this);
41 return_trace (c->check_struct (this) &&
42 c->check_range (mapDataZ.arrayZ,
/* Serialize this index map from a subset plan.  The plan supplies the
 * byte width per entry, the inner-index bit count, and the output map of
 * packed outer.inner values. */
48 bool serialize (hb_serialize_context_t *c, const T &plan)
50 unsigned int width = plan.get_width ();
51 unsigned int inner_bit_count = plan.get_inner_bit_count ();
52 const hb_array_t<const unsigned int> output_map = plan.get_output_map ();
54 TRACE_SERIALIZE (this);
/* Sanity-check encodable ranges: inner_bit_count must be 1..16 and
 * width must be 1..4, as both are stored biased-by-one in 'format'. */
55 if (unlikely (output_map.length && ((((inner_bit_count-1)&~0xF)!=0) || (((width-1)&~0x3)!=0))))
57 if (unlikely (!c->extend_min (*this))) return_trace (false);
/* Pack (width-1) into the high nibble and (inner_bit_count-1) into the
 * low nibble of the format word. */
59 format = ((width-1)<<4)|(inner_bit_count-1);
60 mapCount = output_map.length;
61 HBUINT8 *p = c->allocate_size<HBUINT8> (width * output_map.length);
62 if (unlikely (!p)) return_trace (false);
63 for (unsigned int i = 0; i < output_map.length; i++)
65 unsigned int v = output_map[i];
66 unsigned int outer = v >> 16;
67 unsigned int inner = v & 0xFFFF;
/* Re-pack outer/inner into a single integer using the chosen inner bit
 * count, then (in lines not visible here) emit it as 'width' bytes. */
68 unsigned int u = (outer << inner_bit_count) | inner;
69 for (unsigned int w = width; w > 0;)
/* Map a value through the table, returning packed 16.16 outer.inner. */
79 unsigned int map (unsigned int v) const /* Returns 16.16 outer.inner. */
81 /* If count is zero, pass value unchanged. This takes
82 * care of direct mapping for advance map. */
/* Read the 'width'-byte entry at index v from the raw data... */
91 unsigned int w = get_width ();
92 const HBUINT8 *p = mapDataZ.arrayZ + w * v;
/* ...then split it into outer (high bits) and inner (low n bits). */
98 unsigned int n = get_inner_bit_count ();
99 unsigned int outer = u >> n;
100 unsigned int inner = u & ((1 << n) - 1);
101 u = (outer<<16) | inner;
107 unsigned int get_map_count () const { return mapCount; }
/* Entry byte width (1..4) from the high nibble; inner bit count (1..16)
 * from the low nibble — both stored biased by one. */
108 unsigned int get_width () const { return ((format >> 4) & 3) + 1; }
109 unsigned int get_inner_bit_count () const { return (format & 0xF) + 1; }
112 HBUINT16 format; /* A packed field that describes the compressed
113 * representation of delta-set indices. */
114 HBUINT16 mapCount; /* The number of mapping entries. */
115 UnsizedArrayOf<HBUINT8>
116 mapDataZ; /* The delta-set index mapping data. */
119 DEFINE_SIZE_ARRAY (4, mapDataZ);
122 struct index_map_subset_plan_t
/* NOTE(review): interior lines are missing from this view (numeric prefixes
 * jump); comments cover only the visible code.  This helper plans the
 * subsetting of one DeltaSetIndexMap: it collects which outer/inner
 * delta-set indices survive, then remaps them to the compacted numbering. */
124 enum index_map_index_t {
126 LSB_INDEX, /* dual as TSB */
127 RSB_INDEX, /* dual as BSB */
/* Scan the input map against the subset plan, recording used outer
 * indices into outer_map, used inner indices into inner_sets, and the
 * maximum inner index per outer (max_inners). */
131 void init (const DeltaSetIndexMap &index_map,
132 hb_inc_bimap_t &outer_map,
133 hb_vector_t<hb_set_t *> &inner_sets,
134 const hb_subset_plan_t *plan)
/* A null input map means identity mapping; nothing to plan. */
142 if (&index_map == &Null (DeltaSetIndexMap)) return;
144 unsigned int last_val = (unsigned int)-1;
145 hb_codepoint_t last_gid = (hb_codepoint_t)-1;
146 hb_codepoint_t gid = (hb_codepoint_t) hb_min (index_map.get_map_count (), plan->num_output_glyphs ());
/* outer_bit_count is whatever remains of the entry width after the
 * inner bits are taken out. */
148 outer_bit_count = (index_map.get_width () * 8) - index_map.get_inner_bit_count ();
149 max_inners.resize (inner_sets.length);
150 for (unsigned i = 0; i < inner_sets.length; i++) max_inners[i] = 0;
152 /* Search backwards for a map value different from the last map value */
153 for (; gid > 0; gid--)
155 hb_codepoint_t old_gid;
156 if (!plan->old_gid_for_new_gid (gid - 1, &old_gid))
158 if (last_gid == (hb_codepoint_t) -1)
164 unsigned int v = index_map.map (old_gid);
165 if (last_gid == (hb_codepoint_t) -1)
171 if (v != last_val) break;
/* No retained glyph maps to anything: leave plan empty. */
176 if (unlikely (last_gid == (hb_codepoint_t)-1)) return;
/* Trailing run of identical values can be truncated; keep only up to
 * the last differing entry. */
177 map_count = last_gid;
178 for (gid = 0; gid < map_count; gid++)
180 hb_codepoint_t old_gid;
181 if (plan->old_gid_for_new_gid (gid, &old_gid))
183 unsigned int v = index_map.map (old_gid);
184 unsigned int outer = v >> 16;
185 unsigned int inner = v & 0xFFFF;
186 outer_map.add (outer);
187 if (inner > max_inners[outer]) max_inners[outer] = inner;
/* Guard against a malformed map referencing a nonexistent sub-table. */
188 if (outer >= inner_sets.length) return;
189 inner_sets[outer]->add (inner);
/* Rewrite the collected indices through the compacted outer/inner
 * bimaps, producing output_map entries packed as 16.16 outer.inner. */
200 void remap (const DeltaSetIndexMap *input_map,
201 const hb_inc_bimap_t &outer_map,
202 const hb_vector_t<hb_inc_bimap_t> &inner_maps,
203 const hb_subset_plan_t *plan)
205 if (input_map == &Null (DeltaSetIndexMap)) return;
/* Compute the inner bit count needed to hold the largest remapped
 * inner index across all sub-tables (minimum of 1 bit). */
207 for (unsigned int i = 0; i < max_inners.length; i++)
209 if (inner_maps[i].get_population () == 0) continue;
210 unsigned int bit_count = (max_inners[i]==0)? 1: hb_bit_storage (inner_maps[i][max_inners[i]]);
211 if (bit_count > inner_bit_count) inner_bit_count = bit_count;
214 output_map.resize (map_count);
215 for (hb_codepoint_t gid = 0; gid < output_map.length; gid++)
217 hb_codepoint_t old_gid;
218 if (plan->old_gid_for_new_gid (gid, &old_gid))
220 unsigned int v = input_map->map (old_gid);
221 unsigned int outer = v >> 16;
222 output_map[gid] = (outer_map[outer] << 16) | (inner_maps[outer][v & 0xFFFF]);
225 output_map[gid] = 0; /* Map unused glyph to outer/inner=0/0 */
229 unsigned int get_inner_bit_count () const { return inner_bit_count; }
/* Bytes per serialized entry: total bits rounded up to whole bytes. */
230 unsigned int get_width () const { return ((outer_bit_count + inner_bit_count + 7) / 8); }
231 unsigned int get_map_count () const { return map_count; }
/* Serialized size of the resulting DeltaSetIndexMap; 0 if the map is
 * empty (identity) and should be dropped entirely. */
233 unsigned int get_size () const
234 { return (map_count? (DeltaSetIndexMap::min_size + get_width () * map_count): 0); }
236 bool is_identity () const { return get_output_map ().length == 0; }
237 hb_array_t<const unsigned int> get_output_map () const { return output_map.as_array (); }
240 unsigned int map_count;
241 hb_vector_t<unsigned int> max_inners;
242 unsigned int outer_bit_count;
243 unsigned int inner_bit_count;
244 hb_vector_t<unsigned int> output_map;
247 struct hvarvvar_subset_plan_t
/* NOTE(review): interior lines are missing from this view; comments cover
 * only the visible code.  Aggregate subsetting plan for an HVAR or VVAR
 * table: one index_map_subset_plan_t per index map, plus the compacted
 * outer/inner bimaps shared with the variation store. */
249 hvarvvar_subset_plan_t() : inner_maps (), index_map_plans () {}
250 ~hvarvvar_subset_plan_t() { fini (); }
/* Build the plan: collect used delta-set indices from every index map
 * against the subset plan, then remap them to compact numbering. */
252 void init (const hb_array_t<const DeltaSetIndexMap *> &index_maps,
253 const VariationStore &_var_store,
254 const hb_subset_plan_t *plan)
256 index_map_plans.resize (index_maps.length);
258 var_store = &_var_store;
/* One inner set / inner map per variation-store sub-table. */
259 inner_sets.resize (var_store->get_sub_table_count ());
260 for (unsigned int i = 0; i < inner_sets.length; i++)
261 inner_sets[i] = hb_set_create ();
262 adv_set = hb_set_create ();
264 inner_maps.resize (var_store->get_sub_table_count ());
266 for (unsigned int i = 0; i < inner_maps.length; i++)
267 inner_maps[i].init ();
269 if (unlikely (!index_map_plans.length || !inner_sets.length || !inner_maps.length)) return;
271 bool retain_adv_map = false;
/* index_maps[0] is the advance map; it gets special treatment since a
 * null advance map means direct glyph-ID indexing into sub-table 0. */
272 index_map_plans[0].init (*index_maps[0], outer_map, inner_sets, plan);
273 if (index_maps[0] == &Null (DeltaSetIndexMap))
275 retain_adv_map = plan->retain_gids;
277 for (hb_codepoint_t gid = 0; gid < plan->num_output_glyphs (); gid++)
279 hb_codepoint_t old_gid;
280 if (plan->old_gid_for_new_gid (gid, &old_gid))
281 inner_sets[0]->add (old_gid);
283 hb_set_union (adv_set, inner_sets[0]);
286 for (unsigned int i = 1; i < index_maps.length; i++)
287 index_map_plans[i].init (*index_maps[i], outer_map, inner_sets, plan);
/* retain-gids path (presumably — the guarding condition line is not
 * visible here): keep glyph order, inserting skips for unused slots. */
293 for (hb_codepoint_t gid = 0; gid < plan->num_output_glyphs (); gid++)
294 if (inner_sets[0]->has (gid))
295 inner_maps[0].add (gid);
297 inner_maps[0].skip ();
/* Otherwise pack advance indices first, then the remaining indices of
 * sub-table 0 that aren't advance indices. */
301 inner_maps[0].add_set (adv_set);
302 hb_set_subtract (inner_sets[0], adv_set);
303 inner_maps[0].add_set (inner_sets[0]);
306 for (unsigned int i = 1; i < inner_maps.length; i++)
307 inner_maps[i].add_set (inner_sets[i]);
/* With all bimaps final, remap every index map to compact numbering. */
309 for (unsigned int i = 0; i < index_maps.length; i++)
310 index_map_plans[i].remap (index_maps[i], outer_map, inner_maps, plan);
/* Teardown (presumably fini() — its signature line is not visible):
 * destroy the owned sets and deep-finalize the vectors. */
315 for (unsigned int i = 0; i < inner_sets.length; i++)
316 hb_set_destroy (inner_sets[i]);
317 hb_set_destroy (adv_set);
318 inner_maps.fini_deep ();
319 index_map_plans.fini_deep ();
322 hb_inc_bimap_t outer_map;
323 hb_vector_t<hb_inc_bimap_t> inner_maps;
324 hb_vector_t<index_map_subset_plan_t> index_map_plans;
325 const VariationStore *var_store;
/* Owned hb_set_t pointers; destroyed in the teardown code above. */
328 hb_vector_t<hb_set_t *> inner_sets;
333 * HVAR -- Horizontal Metrics Variations
334 * https://docs.microsoft.com/en-us/typography/opentype/spec/hvar
335 * VVAR -- Vertical Metrics Variations
336 * https://docs.microsoft.com/en-us/typography/opentype/spec/vvar
338 #define HB_OT_TAG_HVAR HB_TAG('H','V','A','R')
339 #define HB_OT_TAG_VVAR HB_TAG('V','V','A','R')
/* NOTE(review): the struct declaration line for HVARVVAR is not visible in
 * this view, and interior lines are missing; comments cover only what is
 * shown.  This is the common base for the HVAR and VVAR tables: a
 * variation store plus advance/lsb/rsb delta-set index maps. */
343 static constexpr hb_tag_t HVARTag = HB_OT_TAG_HVAR;
344 static constexpr hb_tag_t VVARTag = HB_OT_TAG_VVAR;
/* Validate version, the variation store, and all three index-map
 * offsets (null offsets are valid — sanitize accepts them). */
346 bool sanitize (hb_sanitize_context_t *c) const
348 TRACE_SANITIZE (this);
349 return_trace (version.sanitize (c) &&
350 likely (version.major == 1) &&
351 varStore.sanitize (c, this) &&
352 advMap.sanitize (c, this) &&
353 lsbMap.sanitize (c, this) &&
354 rsbMap.sanitize (c, this));
/* Collect this table's index maps in ADV/LSB/RSB order; VVAR overrides
 * this to append its vertical-origin map. */
357 void listup_index_maps (hb_vector_t<const DeltaSetIndexMap *> &index_maps) const
359 index_maps.push (&(this+advMap));
360 index_maps.push (&(this+lsbMap));
361 index_maps.push (&(this+rsbMap));
/* Serialize each planned index map; an identity plan leaves the offset
 * null (the branch writing that is among the non-visible lines). */
364 bool serialize_index_maps (hb_serialize_context_t *c,
365 const hb_array_t<index_map_subset_plan_t> &im_plans)
367 TRACE_SERIALIZE (this);
368 if (im_plans[index_map_subset_plan_t::ADV_INDEX].is_identity ())
370 else if (unlikely (!advMap.serialize (c, this).serialize (c, im_plans[index_map_subset_plan_t::ADV_INDEX])))
371 return_trace (false);
372 if (im_plans[index_map_subset_plan_t::LSB_INDEX].is_identity ())
374 else if (unlikely (!lsbMap.serialize (c, this).serialize (c, im_plans[index_map_subset_plan_t::LSB_INDEX])))
375 return_trace (false);
376 if (im_plans[index_map_subset_plan_t::RSB_INDEX].is_identity ())
378 else if (unlikely (!rsbMap.serialize (c, this).serialize (c, im_plans[index_map_subset_plan_t::RSB_INDEX])))
379 return_trace (false);
/* Shared subset implementation; T is the concrete table (HVAR or VVAR)
 * so the derived listup/serialize overrides are used. */
384 template <typename T>
385 bool _subset (hb_subset_context_t *c) const
388 hvarvvar_subset_plan_t hvar_plan;
389 hb_vector_t<const DeltaSetIndexMap *>
392 ((T*)this)->listup_index_maps (index_maps);
393 hvar_plan.init (index_maps.as_array (), this+varStore, c->plan);
395 T *out = c->serializer->allocate_min<T> ();
396 if (unlikely (!out)) return_trace (false);
398 out->version.major = 1;
399 out->version.minor = 0;
/* Serialize the compacted variation store, then the index maps. */
401 if (unlikely (!out->varStore.serialize (c->serializer, out)
402 .serialize (c->serializer, hvar_plan.var_store, hvar_plan.inner_maps.as_array ())))
403 return_trace (false);
405 return_trace (out->T::serialize_index_maps (c->serializer,
406 hvar_plan.index_map_plans.as_array ()));
/* Advance delta for a glyph at the font's current variation coords. */
409 float get_advance_var (hb_codepoint_t glyph, hb_font_t *font) const
411 unsigned int varidx = (this+advMap).map (glyph);
412 return (this+varStore).get_delta (varidx, font->coords, font->num_coords);
/* Side-bearing delta; 0 unless both lsb and rsb maps are present. */
415 float get_side_bearing_var (hb_codepoint_t glyph,
416 const int *coords, unsigned int coord_count) const
418 if (!has_side_bearing_deltas ()) return 0.f;
419 unsigned int varidx = (this+lsbMap).map (glyph);
420 return (this+varStore).get_delta (varidx, coords, coord_count);
423 bool has_side_bearing_deltas () const { return lsbMap && rsbMap; }
426 FixedVersion<>version; /* Version of the metrics variation table
427 * initially set to 0x00010000u */
428 LOffsetTo<VariationStore>
429 varStore; /* Offset to item variation store table. */
430 LOffsetTo<DeltaSetIndexMap>
431 advMap; /* Offset to advance var-idx mapping. */
432 LOffsetTo<DeltaSetIndexMap>
433 lsbMap; /* Offset to lsb/tsb var-idx mapping. */
434 LOffsetTo<DeltaSetIndexMap>
435 rsbMap; /* Offset to rsb/bsb var-idx mapping. */
438 DEFINE_SIZE_STATIC (20);
/* Concrete HVAR table: the shared HVARVVAR layout tagged 'HVAR'.
 * (The closing brace of this struct is among the lines not visible in
 * this view.) */
441 struct HVAR : HVARVVAR {
442 static constexpr hb_tag_t tableTag = HB_OT_TAG_HVAR;
/* Subsetting is entirely handled by the shared base implementation,
 * instantiated with HVAR so base virtual-free dispatch picks the right
 * listup/serialize members. */
443 bool subset (hb_subset_context_t *c) const { return HVARVVAR::_subset<HVAR> (c); }
/* Concrete VVAR table: extends the shared HVARVVAR layout with a
 * vertical-origin index map.  NOTE(review): interior lines are missing
 * from this view; comments cover only the visible code. */
445 struct VVAR : HVARVVAR {
446 static constexpr hb_tag_t tableTag = HB_OT_TAG_VVAR;
/* Validate the shared part first, then the extra vorgMap offset. */
448 bool sanitize (hb_sanitize_context_t *c) const
450 TRACE_SANITIZE (this);
451 return_trace (static_cast<const HVARVVAR *> (this)->sanitize (c) &&
452 vorgMap.sanitize (c, this));
/* Append the vertical-origin map after the base's adv/lsb/rsb maps so
 * its plan lands at VORG_INDEX. */
455 void listup_index_maps (hb_vector_t<const DeltaSetIndexMap *> &index_maps) const
457 HVARVVAR::listup_index_maps (index_maps);
458 index_maps.push (&(this+vorgMap));
/* Serialize the shared maps, then the vorg map (skipped when its plan
 * is empty; the offset-clearing branch is not visible here). */
461 bool serialize_index_maps (hb_serialize_context_t *c,
462 const hb_array_t<index_map_subset_plan_t> &im_plans)
464 TRACE_SERIALIZE (this);
465 if (unlikely (!HVARVVAR::serialize_index_maps (c, im_plans)))
466 return_trace (false);
467 if (!im_plans[index_map_subset_plan_t::VORG_INDEX].get_map_count ())
469 else if (unlikely (!vorgMap.serialize (c, this).serialize (c, im_plans[index_map_subset_plan_t::VORG_INDEX])))
470 return_trace (false);
475 bool subset (hb_subset_context_t *c) const { return HVARVVAR::_subset<VVAR> (c); }
478 LOffsetTo<DeltaSetIndexMap>
479 vorgMap; /* Offset to vertical-origin var-idx mapping. */
482 DEFINE_SIZE_STATIC (24);
488 #endif /* HB_OT_VAR_HVAR_TABLE_HH */