/*
 * Copyright © 2019 Adobe Inc.
 * Copyright © 2019 Ebrahim Byagowi
 *
 * This is part of HarfBuzz, a text shaping library.
 *
 * Permission is hereby granted, without written agreement and without
 * license or royalty fees, to use, copy, modify, and distribute this
 * software and its documentation for any purpose, provided that the
 * above copyright notice and the following two paragraphs appear in
 * all copies of this software.
 *
 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
 * DAMAGE.
 *
 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
 * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
 *
 * Adobe Author(s): Michiharu Ariza
 */

#ifndef HB_OT_VAR_GVAR_TABLE_HH
#define HB_OT_VAR_GVAR_TABLE_HH

#include "hb-open-type.hh"
#include "hb-ot-var-common.hh"

/*
 * gvar -- Glyph Variation Table
 * https://docs.microsoft.com/en-us/typography/opentype/spec/gvar
 */
#define HB_OT_TAG_gvar HB_TAG('g','v','a','r')

namespace OT {

struct GlyphVariationData : TupleVariationData
{};

struct glyph_variations_t
{
  using tuple_variations_t = TupleVariationData::tuple_variations_t;
  hb_vector_t<tuple_variations_t> glyph_variations;

  hb_vector_t<char> compiled_shared_tuples;
  private:
  unsigned shared_tuples_count = 0;

  /* shared coords -> index map after instantiation */
  hb_hashmap_t<const hb_vector_t<char>*, unsigned> shared_tuples_idx_map;

  public:
  unsigned compiled_shared_tuples_count () const
  { return shared_tuples_count; }

  unsigned compiled_byte_size () const
  {
    unsigned byte_size = 0;
    for (const auto& _ : glyph_variations)
      byte_size += _.get_compiled_byte_size ();

    return byte_size;
  }

  bool create_from_glyphs_var_data (unsigned axis_count,
                                    const hb_array_t<const F2DOT14> shared_tuples,
                                    const hb_subset_plan_t *plan,
                                    const hb_hashmap_t<hb_codepoint_t, hb_bytes_t>& new_gid_var_data_map)
  {
    if (unlikely (!glyph_variations.alloc (plan->new_to_old_gid_list.length, true)))
      return false;

    auto it = hb_iter (plan->new_to_old_gid_list);
    for (auto &_ : it)
    {
      hb_codepoint_t new_gid = _.first;
      contour_point_vector_t *all_contour_points;
      if (!new_gid_var_data_map.has (new_gid) ||
          !plan->new_gid_contour_points_map.has (new_gid, &all_contour_points))
        return false;
      hb_bytes_t var_data = new_gid_var_data_map.get (new_gid);

      const GlyphVariationData* p = reinterpret_cast<const GlyphVariationData*> (var_data.arrayZ);
      hb_vector_t<unsigned> shared_indices;
      GlyphVariationData::tuple_iterator_t iterator;
      tuple_variations_t tuple_vars;

      /* in case variation data is empty, push an empty struct into the vector,
       * keep the vector in sync with the new_to_old_gid_list */
      if (!var_data || !p->has_data () || !all_contour_points->length ||
          !GlyphVariationData::get_tuple_iterator (var_data, axis_count,
                                                   var_data.arrayZ,
                                                   shared_indices, &iterator))
      {
        glyph_variations.push (std::move (tuple_vars));
        continue;
      }

      bool is_composite_glyph = false;
      is_composite_glyph = plan->composite_new_gids.has (new_gid);

      if (!p->decompile_tuple_variations (all_contour_points->length, true /* is_gvar */,
                                          iterator, &(plan->axes_old_index_tag_map),
                                          shared_indices, shared_tuples,
                                          tuple_vars, /* OUT */
                                          is_composite_glyph))
        return false;
      glyph_variations.push (std::move (tuple_vars));
    }
    return !glyph_variations.in_error () && glyph_variations.length == plan->new_to_old_gid_list.length;
  }

  bool instantiate (const hb_subset_plan_t *plan)
  {
    unsigned count = plan->new_to_old_gid_list.length;
    bool iup_optimize = false;
    iup_optimize = plan->flags & HB_SUBSET_FLAGS_OPTIMIZE_IUP_DELTAS;
    for (unsigned i = 0; i < count; i++)
    {
      hb_codepoint_t new_gid = plan->new_to_old_gid_list[i].first;
      contour_point_vector_t *all_points;
      if (!plan->new_gid_contour_points_map.has (new_gid, &all_points))
        return false;
      if (!glyph_variations[i].instantiate (plan->axes_location, plan->axes_triple_distances, all_points, iup_optimize))
        return false;
    }
    return true;
  }

  bool compile_bytes (const hb_map_t& axes_index_map,
                      const hb_map_t& axes_old_index_tag_map)
  {
    if (!compile_shared_tuples (axes_index_map, axes_old_index_tag_map))
      return false;
    for (tuple_variations_t& vars: glyph_variations)
      if (!vars.compile_bytes (axes_index_map, axes_old_index_tag_map,
                               true, /* use shared points */
                               true,
                               &shared_tuples_idx_map))
        return false;

    return true;
  }

  bool compile_shared_tuples (const hb_map_t& axes_index_map,
                              const hb_map_t& axes_old_index_tag_map)
  {
    /* key is pointer to compiled_peak_coords inside each tuple; the hashing
     * function always derefs pointers first */
    hb_hashmap_t<const hb_vector_t<char>*, unsigned> coords_count_map;

    /* count the number of occurrences of each peak coords */
    for (tuple_variations_t& vars: glyph_variations)
    {
      for (tuple_delta_t& var : vars.tuple_vars)
      {
        if (!var.compile_peak_coords (axes_index_map, axes_old_index_tag_map))
          return false;
        unsigned* count;
        if (coords_count_map.has (&(var.compiled_peak_coords), &count))
          coords_count_map.set (&(var.compiled_peak_coords), *count + 1);
        else
          coords_count_map.set (&(var.compiled_peak_coords), 1);
      }
    }

    if (!coords_count_map || coords_count_map.in_error ())
      return false;

    /* add only those coords that are used more than once into the vector and sort */
    hb_vector_t<const hb_vector_t<char>*> shared_coords;
    if (unlikely (!shared_coords.alloc (coords_count_map.get_population ())))
      return false;

    for (const auto _ : coords_count_map.iter ())
    {
      if (_.second == 1) continue;
      shared_coords.push (_.first);
    }

    /* no shared tuples: no coords are used more than once */
    if (!shared_coords) return true;
    /* sort by coords frequency first (high to low), then by the coords bytes */
    hb_qsort (shared_coords.arrayZ, shared_coords.length, sizeof (hb_vector_t<char>*), _cmp_coords, (void *) (&coords_count_map));
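    /* Note: a TupleVariationHeader references shared tuples through a 12-bit
     * tuple index, so at most 4096 shared peak tuples can be kept; hence the
     * hb_min (0xFFFu + 1, ...) cap below. */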
    /* build the shared_coords -> idx map and the shared tuples byte array */

    shared_tuples_count = hb_min (0xFFFu + 1, shared_coords.length);
    unsigned len = shared_tuples_count * (shared_coords[0]->length);
    if (unlikely (!compiled_shared_tuples.alloc (len)))
      return false;

    for (unsigned i = 0; i < shared_tuples_count; i++)
    {
      shared_tuples_idx_map.set (shared_coords[i], i);
      /* add a concat() in hb_vector_t? */
      for (char c : shared_coords[i]->iter ())
        compiled_shared_tuples.push (c);
    }

    return true;
  }

  static int _cmp_coords (const void *pa, const void *pb, void *arg)
  {
    const hb_hashmap_t<const hb_vector_t<char>*, unsigned>* coords_count_map =
        reinterpret_cast<const hb_hashmap_t<const hb_vector_t<char>*, unsigned>*> (arg);

    /* shared_coords is hb_vector_t<const hb_vector_t<char>*>, so cast pa/pb
     * to pointer-to-pointer */
    const hb_vector_t<char>** a = reinterpret_cast<const hb_vector_t<char>**> (const_cast<void*>(pa));
    const hb_vector_t<char>** b = reinterpret_cast<const hb_vector_t<char>**> (const_cast<void*>(pb));

    bool has_a = coords_count_map->has (*a);
    bool has_b = coords_count_map->has (*b);

    if (has_a && has_b)
    {
      unsigned a_num = coords_count_map->get (*a);
      unsigned b_num = coords_count_map->get (*b);

      if (a_num != b_num)
        return b_num - a_num;

      return (*b)->as_array ().cmp ((*a)->as_array ());
    }
    else if (has_a) return -1;
    else if (has_b) return 1;
    else return 0;
  }

  template<typename Iterator,
           hb_requires (hb_is_iterator (Iterator))>
  bool serialize_glyph_var_data (hb_serialize_context_t *c,
                                 Iterator it,
                                 bool long_offset,
                                 unsigned num_glyphs,
                                 char* glyph_var_data_offsets /* OUT: glyph var data offsets array */) const
  {
    TRACE_SERIALIZE (this);

    if (long_offset)
    {
      ((HBUINT32 *) glyph_var_data_offsets)[0] = 0;
      glyph_var_data_offsets += 4;
    }
    else
    {
      ((HBUINT16 *) glyph_var_data_offsets)[0] = 0;
      glyph_var_data_offsets += 2;
    }
    unsigned glyph_offset = 0;
    hb_codepoint_t last_gid = 0;
    unsigned idx = 0;

    TupleVariationData* cur_glyph = c->start_embed<TupleVariationData> ();
    if (!cur_glyph) return_trace (false);
    for (auto &_ : it)
    {
      hb_codepoint_t gid = _.first;
      if (long_offset)
        for (; last_gid < gid; last_gid++)
          ((HBUINT32 *) glyph_var_data_offsets)[last_gid] = glyph_offset;
      else
        for (; last_gid < gid; last_gid++)
          ((HBUINT16 *) glyph_var_data_offsets)[last_gid] = glyph_offset / 2;

      if (idx >= glyph_variations.length) return_trace (false);
      if (!cur_glyph->serialize (c, true, glyph_variations[idx])) return_trace (false);
      TupleVariationData* next_glyph = c->start_embed<TupleVariationData> ();
      glyph_offset += (char *) next_glyph - (char *) cur_glyph;

      if (long_offset)
        ((HBUINT32 *) glyph_var_data_offsets)[gid] = glyph_offset;
      else
        ((HBUINT16 *) glyph_var_data_offsets)[gid] = glyph_offset / 2;

      last_gid++;
      idx++;
      cur_glyph = next_glyph;
    }

    if (long_offset)
      for (; last_gid < num_glyphs; last_gid++)
        ((HBUINT32 *) glyph_var_data_offsets)[last_gid] = glyph_offset;
    else
      for (; last_gid < num_glyphs; last_gid++)
        ((HBUINT16 *) glyph_var_data_offsets)[last_gid] = glyph_offset / 2;
    return_trace (true);
  }
};

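/*
 * The 'gvar' table: a fixed header, an offset array with (glyphCount + 1)
 * entries, plus the shared tuple records and the per-glyph GlyphVariationData
 * subtables reached through the sharedTuples and dataZ offsets (see the member
 * descriptions at the end of this struct). Besides sanitization, this struct
 * implements subsetting/instancing and an accelerator that applies deltas to
 * contour points at runtime.
 */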
struct gvar
{
  static constexpr hb_tag_t tableTag = HB_OT_TAG_gvar;

  bool sanitize_shallow (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) &&
                  hb_barrier () &&
                  (version.major == 1) &&
                  sharedTuples.sanitize (c, this, axisCount * sharedTupleCount) &&
                  (is_long_offset () ?
                     c->check_array (get_long_offset_array (), c->get_num_glyphs () + 1) :
                     c->check_array (get_short_offset_array (), c->get_num_glyphs () + 1)));
  }

  /* GlyphVariationData not sanitized here; must be checked while accessing each glyph variation data */
  bool sanitize (hb_sanitize_context_t *c) const
  { return sanitize_shallow (c); }

  bool decompile_glyph_variations (hb_subset_context_t *c,
                                   glyph_variations_t& glyph_vars /* OUT */) const
  {
    hb_hashmap_t<hb_codepoint_t, hb_bytes_t> new_gid_var_data_map;
    auto it = hb_iter (c->plan->new_to_old_gid_list);
    if (it->first == 0 && !(c->plan->flags & HB_SUBSET_FLAGS_NOTDEF_OUTLINE))
    {
      new_gid_var_data_map.set (0, hb_bytes_t ());
      it++;
    }

    for (auto &_ : it)
    {
      hb_codepoint_t new_gid = _.first;
      hb_codepoint_t old_gid = _.second;
      hb_bytes_t var_data_bytes = get_glyph_var_data_bytes (c->source_blob, glyphCountX, old_gid);
      new_gid_var_data_map.set (new_gid, var_data_bytes);
    }

    if (new_gid_var_data_map.in_error ()) return false;

    hb_array_t<const F2DOT14> shared_tuples = (this+sharedTuples).as_array ((unsigned) sharedTupleCount * (unsigned) axisCount);
    return glyph_vars.create_from_glyphs_var_data (axisCount, shared_tuples, c->plan, new_gid_var_data_map);
  }

  template<typename Iterator,
           hb_requires (hb_is_iterator (Iterator))>
  bool serialize (hb_serialize_context_t *c,
                  const glyph_variations_t& glyph_vars,
                  Iterator it,
                  unsigned axis_count,
                  unsigned num_glyphs,
                  bool force_long_offsets) const
  {
    TRACE_SERIALIZE (this);
    gvar *out = c->allocate_min<gvar> ();
    if (unlikely (!out)) return_trace (false);

    out->version.major = 1;
    out->version.minor = 0;
    out->axisCount = axis_count;
    out->glyphCountX = hb_min (0xFFFFu, num_glyphs);

    unsigned glyph_var_data_size = glyph_vars.compiled_byte_size ();
    /* According to the spec: If the short format (Offset16) is used for offsets,
     * the value stored is the offset divided by 2, so the maximum data size should
     * be 2 * 0xFFFFu, which is 0x1FFFEu */
    bool long_offset = glyph_var_data_size > 0x1FFFEu || force_long_offsets;
    out->flags = long_offset ? 1 : 0;

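    /* The offset array has (num_glyphs + 1) entries, 4 bytes each for the long
     * format or 2 bytes each for the short format, so the size of the last
     * glyph's variation data can be derived from consecutive offsets. */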
    HBUINT8 *glyph_var_data_offsets = c->allocate_size<HBUINT8> ((long_offset ? 4 : 2) * (num_glyphs + 1), false);
    if (!glyph_var_data_offsets) return_trace (false);

    /* shared tuples */
    unsigned shared_tuple_count = glyph_vars.compiled_shared_tuples_count ();
    out->sharedTupleCount = shared_tuple_count;

    if (!shared_tuple_count)
      out->sharedTuples = 0;
    else
    {
      hb_array_t<const char> shared_tuples = glyph_vars.compiled_shared_tuples.as_array ().copy (c);
      if (!shared_tuples.arrayZ) return_trace (false);
      out->sharedTuples = shared_tuples.arrayZ - (char *) out;
    }

    char *glyph_var_data = c->start_embed<char> ();
    if (!glyph_var_data) return_trace (false);
    out->dataZ = glyph_var_data - (char *) out;

    return_trace (glyph_vars.serialize_glyph_var_data (c, it, long_offset, num_glyphs,
                                                       (char *) glyph_var_data_offsets));
  }

  bool instantiate (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    glyph_variations_t glyph_vars;
    if (!decompile_glyph_variations (c, glyph_vars))
      return_trace (false);

    if (!glyph_vars.instantiate (c->plan)) return_trace (false);
    if (!glyph_vars.compile_bytes (c->plan->axes_index_map, c->plan->axes_old_index_tag_map))
      return_trace (false);

    unsigned axis_count = c->plan->axes_index_map.get_population ();
    unsigned num_glyphs = c->plan->num_output_glyphs ();
    auto it = hb_iter (c->plan->new_to_old_gid_list);

    bool force_long_offsets = false;
#ifdef HB_EXPERIMENTAL_API
    force_long_offsets = c->plan->flags & HB_SUBSET_FLAGS_IFTB_REQUIREMENTS;
#endif
    return_trace (serialize (c->serializer, glyph_vars, it, axis_count, num_glyphs, force_long_offsets));
  }

  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    if (c->plan->all_axes_pinned)
      return_trace (false);

    if (c->plan->normalized_coords)
      return_trace (instantiate (c));

    unsigned glyph_count = version.to_int () ? c->plan->source->get_num_glyphs () : 0;

    gvar *out = c->serializer->allocate_min<gvar> ();
    if (unlikely (!out)) return_trace (false);

    out->version.major = 1;
    out->version.minor = 0;
    out->axisCount = axisCount;
    out->sharedTupleCount = sharedTupleCount;

    unsigned int num_glyphs = c->plan->num_output_glyphs ();
    out->glyphCountX = hb_min (0xFFFFu, num_glyphs);

    auto it = hb_iter (c->plan->new_to_old_gid_list);
    if (it->first == 0 && !(c->plan->flags & HB_SUBSET_FLAGS_NOTDEF_OUTLINE))
      it++;
    unsigned int subset_data_size = 0;
    for (auto &_ : it)
    {
      hb_codepoint_t old_gid = _.second;
      subset_data_size += get_glyph_var_data_bytes (c->source_blob, glyph_count, old_gid).length;
    }

    /* According to the spec: If the short format (Offset16) is used for offsets,
     * the value stored is the offset divided by 2, so the maximum data size should
     * be 2 * 0xFFFFu, which is 0x1FFFEu */
    bool long_offset = subset_data_size > 0x1FFFEu;
#ifdef HB_EXPERIMENTAL_API
    long_offset = long_offset || (c->plan->flags & HB_SUBSET_FLAGS_IFTB_REQUIREMENTS);
#endif
    out->flags = long_offset ? 1 : 0;

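    /* Non-instancing subset path: shared tuples and per-glyph variation data are
     * copied verbatim from the source table; only the header and the offset
     * array are rebuilt below. */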
    HBUINT8 *subset_offsets = c->serializer->allocate_size<HBUINT8> ((long_offset ? 4 : 2) * (num_glyphs + 1), false);
    if (!subset_offsets) return_trace (false);

    /* shared tuples */
    if (!sharedTupleCount || !sharedTuples)
      out->sharedTuples = 0;
    else
    {
      unsigned int shared_tuple_size = F2DOT14::static_size * axisCount * sharedTupleCount;
      F2DOT14 *tuples = c->serializer->allocate_size<F2DOT14> (shared_tuple_size);
      if (!tuples) return_trace (false);
      out->sharedTuples = (char *) tuples - (char *) out;
      hb_memcpy (tuples, this+sharedTuples, shared_tuple_size);
    }

    /* This ordering relative to the shared tuples array, which puts the glyphVariationData
       last in the table, is required when HB_SUBSET_FLAGS_IFTB_REQUIREMENTS is set */
    char *subset_data = c->serializer->allocate_size<char> (subset_data_size, false);
    if (!subset_data) return_trace (false);
    out->dataZ = subset_data - (char *) out;

    if (long_offset)
    {
      ((HBUINT32 *) subset_offsets)[0] = 0;
      subset_offsets += 4;
    }
    else
    {
      ((HBUINT16 *) subset_offsets)[0] = 0;
      subset_offsets += 2;
    }
    unsigned int glyph_offset = 0;

    hb_codepoint_t last = 0;
    it = hb_iter (c->plan->new_to_old_gid_list);
    if (it->first == 0 && !(c->plan->flags & HB_SUBSET_FLAGS_NOTDEF_OUTLINE))
      it++;
    for (auto &_ : it)
    {
      hb_codepoint_t gid = _.first;
      hb_codepoint_t old_gid = _.second;

      if (long_offset)
        for (; last < gid; last++)
          ((HBUINT32 *) subset_offsets)[last] = glyph_offset;
      else
        for (; last < gid; last++)
          ((HBUINT16 *) subset_offsets)[last] = glyph_offset / 2;

      hb_bytes_t var_data_bytes = get_glyph_var_data_bytes (c->source_blob,
                                                            glyph_count,
                                                            old_gid);

      hb_memcpy (subset_data, var_data_bytes.arrayZ, var_data_bytes.length);
      subset_data += var_data_bytes.length;
      glyph_offset += var_data_bytes.length;

      if (long_offset)
        ((HBUINT32 *) subset_offsets)[gid] = glyph_offset;
      else
        ((HBUINT16 *) subset_offsets)[gid] = glyph_offset / 2;

      last++; // Skip over gid
    }

    if (long_offset)
      for (; last < num_glyphs; last++)
        ((HBUINT32 *) subset_offsets)[last] = glyph_offset;
    else
      for (; last < num_glyphs; last++)
        ((HBUINT16 *) subset_offsets)[last] = glyph_offset / 2;

    return_trace (true);
  }

  protected:
  const hb_bytes_t get_glyph_var_data_bytes (hb_blob_t *blob,
                                             unsigned glyph_count,
                                             hb_codepoint_t glyph) const
  {
    unsigned start_offset = get_offset (glyph_count, glyph);
    unsigned end_offset = get_offset (glyph_count, glyph+1);
    if (unlikely (end_offset < start_offset)) return hb_bytes_t ();
    unsigned length = end_offset - start_offset;
    hb_bytes_t var_data = blob->as_bytes ().sub_array (((unsigned) dataZ) + start_offset, length);
    return likely (var_data.length >= GlyphVariationData::min_size) ? var_data : hb_bytes_t ();
  }

  bool is_long_offset () const { return flags & 1; }

  unsigned get_offset (unsigned glyph_count, unsigned i) const
  {
    if (unlikely (i > glyph_count)) return 0;
    hb_barrier ();
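    /* Short-format offsets are stored divided by 2 (per the spec), so scale them back up here. */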
    return is_long_offset () ? get_long_offset_array ()[i] : get_short_offset_array ()[i] * 2;
  }

  const HBUINT32 *get_long_offset_array () const { return (const HBUINT32 *) &offsetZ; }
  const HBUINT16 *get_short_offset_array () const { return (const HBUINT16 *) &offsetZ; }

  public:
  struct accelerator_t
  {
    accelerator_t (hb_face_t *face)
    {
      table = hb_sanitize_context_t ().reference_table<gvar> (face);
      /* If sanitize failed, set glyphCount to 0. */
      glyphCount = table->version.to_int () ? face->get_num_glyphs () : 0;

      /* For shared tuples that only have one or two axes active, cache the indices
       * of those axes. This speeds up calculate_scalar() a lot for fonts with
       * lots of axes and many "monovar" tuples. */
      hb_array_t<const F2DOT14> shared_tuples = (table+table->sharedTuples).as_array (table->sharedTupleCount * table->axisCount);
      unsigned count = table->sharedTupleCount;
      if (unlikely (!shared_tuple_active_idx.resize (count, false))) return;
      unsigned axis_count = table->axisCount;
      for (unsigned i = 0; i < count; i++)
      {
        hb_array_t<const F2DOT14> tuple = shared_tuples.sub_array (axis_count * i, axis_count);
        int idx1 = -1, idx2 = -1;
        for (unsigned j = 0; j < axis_count; j++)
        {
          const F2DOT14 &peak = tuple.arrayZ[j];
          if (peak.to_int () != 0)
          {
            if (idx1 == -1)
              idx1 = j;
            else if (idx2 == -1)
              idx2 = j;
            else
            {
              idx1 = idx2 = -1;
              break;
            }
          }
        }
        shared_tuple_active_idx.arrayZ[i] = {idx1, idx2};
      }
    }
    ~accelerator_t () { table.destroy (); }

    private:

    static float infer_delta (const hb_array_t<contour_point_t> points,
                              const hb_array_t<contour_point_t> deltas,
                              unsigned int target, unsigned int prev, unsigned int next,
                              float contour_point_t::*m)
    {
      float target_val = points.arrayZ[target].*m;
      float prev_val = points.arrayZ[prev].*m;
      float next_val = points.arrayZ[next].*m;
      float prev_delta = deltas.arrayZ[prev].*m;
      float next_delta = deltas.arrayZ[next].*m;

      if (prev_val == next_val)
        return (prev_delta == next_delta) ? prev_delta : 0.f;
      else if (target_val <= hb_min (prev_val, next_val))
        return (prev_val < next_val) ? prev_delta : next_delta;
      else if (target_val >= hb_max (prev_val, next_val))
        return (prev_val > next_val) ? prev_delta : next_delta;

      /* linear interpolation */
      float r = (target_val - prev_val) / (next_val - prev_val);
      return prev_delta + r * (next_delta - prev_delta);
    }

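    /* Advance to the next point index within the contour, wrapping from end back to start. */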
    static unsigned int next_index (unsigned int i, unsigned int start, unsigned int end)
    { return (i >= end) ? start : (i + 1); }

    public:
    bool apply_deltas_to_points (hb_codepoint_t glyph,
                                 hb_array_t<const int> coords,
                                 const hb_array_t<contour_point_t> points,
                                 bool phantom_only = false) const
    {
      if (unlikely (glyph >= glyphCount)) return true;

      hb_bytes_t var_data_bytes = table->get_glyph_var_data_bytes (table.get_blob (), glyphCount, glyph);
      if (!var_data_bytes.as<GlyphVariationData> ()->has_data ()) return true;
      hb_vector_t<unsigned int> shared_indices;
      GlyphVariationData::tuple_iterator_t iterator;
      if (!GlyphVariationData::get_tuple_iterator (var_data_bytes, table->axisCount,
                                                   var_data_bytes.arrayZ,
                                                   shared_indices, &iterator))
        return true; /* so no deltas are applied at all */

      /* Save original points for inferred delta calculation */
      contour_point_vector_t orig_points_vec; // Populated lazily
      auto orig_points = orig_points_vec.as_array ();

      /* flag is used to indicate referenced point */
      contour_point_vector_t deltas_vec; // Populated lazily
      auto deltas = deltas_vec.as_array ();

      hb_vector_t<unsigned> end_points; // Populated lazily

      unsigned num_coords = table->axisCount;
      hb_array_t<const F2DOT14> shared_tuples = (table+table->sharedTuples).as_array (table->sharedTupleCount * num_coords);

      hb_vector_t<unsigned int> private_indices;
      hb_vector_t<int> x_deltas;
      hb_vector_t<int> y_deltas;
      unsigned count = points.length;
      bool flush = false;
      do
      {
        float scalar = iterator.current_tuple->calculate_scalar (coords, num_coords, shared_tuples,
                                                                 &shared_tuple_active_idx);
        if (scalar == 0.f) continue;
        const HBUINT8 *p = iterator.get_serialized_data ();
        unsigned int length = iterator.current_tuple->get_data_size ();
        if (unlikely (!iterator.var_data_bytes.check_range (p, length)))
          return false;

        if (!deltas)
        {
          if (unlikely (!deltas_vec.resize (count, false))) return false;
          deltas = deltas_vec.as_array ();
          hb_memset (deltas.arrayZ + (phantom_only ? count - 4 : 0), 0,
                     (phantom_only ? 4 : count) * sizeof (deltas[0]));
        }

        const HBUINT8 *end = p + length;

        bool has_private_points = iterator.current_tuple->has_private_points ();
        if (has_private_points &&
            !GlyphVariationData::decompile_points (p, private_indices, end))
          return false;
        const hb_array_t<unsigned int> &indices = has_private_points ? private_indices : shared_indices;

        bool apply_to_all = (indices.length == 0);
        unsigned int num_deltas = apply_to_all ? points.length : indices.length;
        if (unlikely (!x_deltas.resize (num_deltas, false))) return false;
        if (unlikely (!GlyphVariationData::decompile_deltas (p, x_deltas, end))) return false;
        if (unlikely (!y_deltas.resize (num_deltas, false))) return false;
        if (unlikely (!GlyphVariationData::decompile_deltas (p, y_deltas, end))) return false;

        if (!apply_to_all)
        {
          if (!orig_points && !phantom_only)
          {
            orig_points_vec.extend (points);
            if (unlikely (orig_points_vec.in_error ())) return false;
            orig_points = orig_points_vec.as_array ();
          }

          if (flush)
          {
            for (unsigned int i = phantom_only ? count - 4 : 0; i < count; i++)
              points.arrayZ[i].translate (deltas.arrayZ[i]);
            flush = false;
          }
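          /* The last four points are the phantom (metrics) points appended after the
           * outline points; in phantom_only mode only their deltas are accumulated. */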
          hb_memset (deltas.arrayZ + (phantom_only ? count - 4 : 0), 0,
                     (phantom_only ? 4 : count) * sizeof (deltas[0]));
        }

        if (HB_OPTIMIZE_SIZE_VAL)
        {
          for (unsigned int i = 0; i < num_deltas; i++)
          {
            unsigned int pt_index;
            if (apply_to_all)
              pt_index = i;
            else
            {
              pt_index = indices[i];
              if (unlikely (pt_index >= deltas.length)) continue;
            }
            if (phantom_only && pt_index < count - 4) continue;
            auto &delta = deltas.arrayZ[pt_index];
            delta.flag = 1; /* this point is referenced, i.e., explicit deltas specified */
            delta.x += x_deltas.arrayZ[i] * scalar;
            delta.y += y_deltas.arrayZ[i] * scalar;
          }
        }
        else
        {
          /* Ouch. Four cases... for optimization. */
          if (scalar != 1.0f)
          {
            if (apply_to_all)
              for (unsigned int i = phantom_only ? count - 4 : 0; i < count; i++)
              {
                unsigned int pt_index = i;
                auto &delta = deltas.arrayZ[pt_index];
                delta.x += x_deltas.arrayZ[i] * scalar;
                delta.y += y_deltas.arrayZ[i] * scalar;
              }
            else
              for (unsigned int i = 0; i < num_deltas; i++)
              {
                unsigned int pt_index = indices[i];
                if (unlikely (pt_index >= deltas.length)) continue;
                if (phantom_only && pt_index < count - 4) continue;
                auto &delta = deltas.arrayZ[pt_index];
                delta.flag = 1; /* this point is referenced, i.e., explicit deltas specified */
                delta.x += x_deltas.arrayZ[i] * scalar;
                delta.y += y_deltas.arrayZ[i] * scalar;
              }
          }
          else
          {
            if (apply_to_all)
              for (unsigned int i = phantom_only ? count - 4 : 0; i < count; i++)
              {
                unsigned int pt_index = i;
                auto &delta = deltas.arrayZ[pt_index];
                delta.x += x_deltas.arrayZ[i];
                delta.y += y_deltas.arrayZ[i];
              }
            else
              for (unsigned int i = 0; i < num_deltas; i++)
              {
                unsigned int pt_index = indices[i];
                if (unlikely (pt_index >= deltas.length)) continue;
                if (phantom_only && pt_index < count - 4) continue;
                auto &delta = deltas.arrayZ[pt_index];
                delta.flag = 1; /* this point is referenced, i.e., explicit deltas specified */
                delta.x += x_deltas.arrayZ[i];
                delta.y += y_deltas.arrayZ[i];
              }
          }
        }

        /* infer deltas for unreferenced points */
        if (!apply_to_all && !phantom_only)
        {
          if (!end_points)
          {
            for (unsigned i = 0; i < count; ++i)
              if (points.arrayZ[i].is_end_point)
                end_points.push (i);
            if (unlikely (end_points.in_error ())) return false;
          }

          unsigned start_point = 0;
          for (unsigned end_point : end_points)
          {
            /* Check the number of unreferenced points in a contour. If no unref points or no ref points, nothing to do. */
            unsigned unref_count = 0;
            for (unsigned i = start_point; i < end_point + 1; i++)
              unref_count += deltas.arrayZ[i].flag;
            unref_count = (end_point - start_point + 1) - unref_count;

            unsigned j = start_point;
            if (unref_count == 0 || unref_count > end_point - start_point)
              goto no_more_gaps;

            for (;;)
            {
              /* Locate the next gap of unreferenced points between two referenced points prev and next.
               * Note that a gap may wrap around at left (start_point) and/or at right (end_point). */
              unsigned int prev, next, i;
              for (;;)
              {
                i = j;
                j = next_index (i, start_point, end_point);
                if (deltas.arrayZ[i].flag && !deltas.arrayZ[j].flag) break;
              }
              prev = j = i;
              for (;;)
              {
                i = j;
                j = next_index (i, start_point, end_point);
                if (!deltas.arrayZ[i].flag && deltas.arrayZ[j].flag) break;
              }
              next = j;
              /* Infer deltas for all unref points in the gap between prev and next */
              i = prev;
              for (;;)
              {
                i = next_index (i, start_point, end_point);
                if (i == next) break;
                deltas.arrayZ[i].x = infer_delta (orig_points, deltas, i, prev, next, &contour_point_t::x);
                deltas.arrayZ[i].y = infer_delta (orig_points, deltas, i, prev, next, &contour_point_t::y);
                if (--unref_count == 0) goto no_more_gaps;
              }
            }
            no_more_gaps:
            start_point = end_point + 1;
          }
        }

        flush = true;

      } while (iterator.move_to_next ());

      if (flush)
      {
        for (unsigned int i = phantom_only ? count - 4 : 0; i < count; i++)
          points.arrayZ[i].translate (deltas.arrayZ[i]);
      }

      return true;
    }

    unsigned int get_axis_count () const { return table->axisCount; }

    private:
    hb_blob_ptr_t<gvar> table;
    unsigned glyphCount;
    hb_vector_t<hb_pair_t<int, int>> shared_tuple_active_idx;
  };

  protected:
  FixedVersion<> version;       /* Version number of the glyph variations table.
                                 * Set to 0x00010000u. */
  HBUINT16 axisCount;           /* The number of variation axes for this font. This must be
                                 * the same number as axisCount in the 'fvar' table. */
  HBUINT16 sharedTupleCount;    /* The number of shared tuple records. Shared tuple records
                                 * can be referenced within glyph variation data tables for
                                 * multiple glyphs, as opposed to other tuple records stored
                                 * directly within a glyph variation data table. */
  NNOffset32To<UnsizedArrayOf<F2DOT14>>
           sharedTuples;        /* Offset from the start of this table to the shared tuple records.
                                 * Array of tuple records shared across all glyph variation data tables. */
  HBUINT16 glyphCountX;         /* The number of glyphs in this font. This must match the number of
                                 * glyphs stored elsewhere in the font. */
  HBUINT16 flags;               /* Bit-field that gives the format of the offset array that follows.
                                 * If bit 0 is clear, the offsets are uint16; if bit 0 is set, the
                                 * offsets are uint32. */
  Offset32To<GlyphVariationData>
           dataZ;               /* Offset from the start of this table to the array of
                                 * GlyphVariationData tables. */
  UnsizedArrayOf<HBUINT8>
           offsetZ;             /* Offsets from the start of the GlyphVariationData array
                                 * to each GlyphVariationData table. */
  public:
  DEFINE_SIZE_ARRAY (20, offsetZ);
};

struct gvar_accelerator_t : gvar::accelerator_t {
  gvar_accelerator_t (hb_face_t *face) : gvar::accelerator_t (face) {}
};

} /* namespace OT */

#endif /* HB_OT_VAR_GVAR_TABLE_HH */