/*
 * Copyright © 2007,2008,2009,2010  Red Hat, Inc.
 * Copyright © 2012,2018  Google, Inc.
 * Copyright © 2019  Facebook, Inc.
 *
 * This is part of HarfBuzz, a text shaping library.
 *
 * Permission is hereby granted, without written agreement and without
 * license or royalty fees, to use, copy, modify, and distribute this
 * software and its documentation for any purpose, provided that the
 * above copyright notice and the following two paragraphs appear in
 * all copies of this software.
 *
 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
 * DAMAGE.
 *
 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
 * FITNESS FOR A PARTICULAR PURPOSE.  THE SOFTWARE PROVIDED HEREUNDER IS
 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
 *
 * Red Hat Author(s): Behdad Esfahbod
 * Google Author(s): Behdad Esfahbod
 * Facebook Author(s): Behdad Esfahbod
 */

#ifndef HB_SERIALIZE_HH
#define HB_SERIALIZE_HH

#include "hb.hh"
#include "hb-blob.hh"
#include "hb-map.hh"
#include "hb-pool.hh"


/*
 * Serialize
 */

enum hb_serialize_error_t {
  HB_SERIALIZE_ERROR_NONE            = 0x00000000u,
  HB_SERIALIZE_ERROR_OTHER           = 0x00000001u,
  HB_SERIALIZE_ERROR_OFFSET_OVERFLOW = 0x00000002u,
  HB_SERIALIZE_ERROR_OUT_OF_ROOM     = 0x00000004u,
  HB_SERIALIZE_ERROR_INT_OVERFLOW    = 0x00000008u,
  HB_SERIALIZE_ERROR_ARRAY_OVERFLOW  = 0x00000010u
};
HB_MARK_AS_FLAG_T (hb_serialize_error_t);

struct hb_serialize_context_t
{
  typedef unsigned objidx_t;

  enum whence_t {
    Head,     /* Relative to the current object head (default). */
    Tail,     /* Relative to the current object tail after packed. */
    Absolute  /* Absolute: from the start of the serialize buffer. */
  };
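  /* Illustrative sketch (not part of the API): resolve_links () below
   * turns each link into an offset value according to its whence_t, and
   * then subtracts the link's bias:
   *
   *   Head:     offset = child->head - parent->head;
   *   Tail:     offset = child->head - parent->tail;
   *   Absolute: offset = (head - start) + (child->head - tail);
   *
   * A nonzero bias is used when the offset is measured from some base
   * inside the parent object rather than from its head; see to_bias (). */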
  struct object_t
  {
    void fini () { links.fini (); }

    bool operator == (const object_t &o) const
    {
      return (tail - head == o.tail - o.head)
          && (links.length == o.links.length)
          && 0 == hb_memcmp (head, o.head, tail - head)
          && links.as_bytes () == o.links.as_bytes ();
    }
    uint32_t hash () const
    {
      return hb_bytes_t (head, tail - head).hash () ^
             links.as_bytes ().hash ();
    }

    struct link_t
    {
      unsigned width: 3;
      bool is_signed: 1;
      unsigned whence: 2;
      unsigned position: 28;
      unsigned bias;
      objidx_t objidx;
    };

    char *head;
    char *tail;
    hb_vector_t<link_t> links;
    object_t *next;
  };

  struct snapshot_t
  {
    char *head;
    char *tail;
    object_t *current;  // Just for sanity check
    unsigned num_links;
    hb_serialize_error_t errors;
  };

  snapshot_t snapshot ()
  { return snapshot_t { head, tail, current, current->links.length, errors }; }

  hb_serialize_context_t (void *start_, unsigned int size) :
    start ((char *) start_),
    end (start + size),
    current (nullptr)
  { reset (); }
  ~hb_serialize_context_t () { fini (); }

  void fini ()
  {
    for (object_t *_ : ++hb_iter (packed)) _->fini ();
    packed.fini ();
    this->packed_map.fini ();

    while (current)
    {
      auto *_ = current;
      current = current->next;
      _->fini ();
    }
    object_pool.fini ();
  }

  bool in_error () const { return bool (errors); }

  bool successful () const { return !bool (errors); }

  HB_NODISCARD bool ran_out_of_room () const { return errors & HB_SERIALIZE_ERROR_OUT_OF_ROOM; }
  HB_NODISCARD bool offset_overflow () const { return errors & HB_SERIALIZE_ERROR_OFFSET_OVERFLOW; }
  HB_NODISCARD bool only_offset_overflow () const { return errors == HB_SERIALIZE_ERROR_OFFSET_OVERFLOW; }
  HB_NODISCARD bool only_overflow () const
  {
    return errors == HB_SERIALIZE_ERROR_OFFSET_OVERFLOW
        || errors == HB_SERIALIZE_ERROR_INT_OVERFLOW
        || errors == HB_SERIALIZE_ERROR_ARRAY_OVERFLOW;
  }

  void reset (void *start_, unsigned int size)
  {
    start = (char *) start_;
    end = start + size;
    reset ();
    current = nullptr;
  }

  void reset ()
  {
    this->errors = HB_SERIALIZE_ERROR_NONE;
    this->head = this->start;
    this->tail = this->end;
    this->debug_depth = 0;

    fini ();
    this->packed.push (nullptr);
    this->packed_map.init ();
  }

  bool check_success (bool success,
                      hb_serialize_error_t err_type = HB_SERIALIZE_ERROR_OTHER)
  {
    return successful ()
        && (success || err (err_type));
  }

  template <typename T1, typename T2>
  bool check_equal (T1 &&v1, T2 &&v2, hb_serialize_error_t err_type)
  {
    if ((long long) v1 != (long long) v2)
    {
      return err (err_type);
    }
    return true;
  }
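  /* Usage sketch (hypothetical caller, not from this file): the check_*
   * helpers record an error on the context and return false, so serialize
   * methods can bail out in a single expression:
   *
   *   if (unlikely (!c->check_success (count <= 0xFFFFu,
   *                                    HB_SERIALIZE_ERROR_INT_OVERFLOW)))
   *     return false;
   *
   * check_assign () below combines assignment with the check: it writes
   * v2 into v1 and flags err_type if the destination type truncated it. */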
  template <typename T1, typename T2>
  bool check_assign (T1 &v1, T2 &&v2, hb_serialize_error_t err_type)
  { return check_equal (v1 = v2, v2, err_type); }

  template <typename T> bool propagate_error (T &&obj)
  { return check_success (!hb_deref (obj).in_error ()); }

  template <typename T1, typename... Ts> bool propagate_error (T1 &&o1, Ts&&... os)
  { return propagate_error (std::forward<T1> (o1)) &&
           propagate_error (std::forward<Ts> (os)...); }

  /* To be called around main operation. */
  template <typename Type>
  Type *start_serialize ()
  {
    DEBUG_MSG_LEVEL (SERIALIZE, this->start, 0, +1,
                     "start [%p..%p] (%lu bytes)",
                     this->start, this->end,
                     (unsigned long) (this->end - this->start));

    assert (!current);
    return push<Type> ();
  }
  void end_serialize ()
  {
    DEBUG_MSG_LEVEL (SERIALIZE, this->start, 0, -1,
                     "end [%p..%p] serialized %u bytes; %s",
                     this->start, this->end,
                     (unsigned) (this->head - this->start),
                     successful () ? "successful" : "UNSUCCESSFUL");

    propagate_error (packed, packed_map);

    if (unlikely (!current)) return;
    if (unlikely (in_error ()))
    {
      // Offset overflows that occur before link resolution cannot be handled
      // by repacking, so set a more general error.
      if (offset_overflow ()) err (HB_SERIALIZE_ERROR_OTHER);
      return;
    }

    assert (!current->next);

    /* Only "pack" if there exist other objects... Otherwise, don't bother.
     * Saves a move. */
    if (packed.length <= 1)
      return;

    pop_pack (false);

    resolve_links ();
  }

  template <typename Type = void>
  Type *push ()
  {
    if (unlikely (in_error ())) return start_embed<Type> ();

    object_t *obj = object_pool.alloc ();
    if (unlikely (!obj))
      check_success (false);
    else
    {
      obj->head = head;
      obj->tail = tail;
      obj->next = current;
      current = obj;
    }
    return start_embed<Type> ();
  }
  void pop_discard ()
  {
    object_t *obj = current;
    if (unlikely (!obj)) return;
    if (unlikely (in_error ())) return;

    current = current->next;
    revert (obj->head, obj->tail);
    obj->fini ();
    object_pool.release (obj);
  }

  /* Set share to false when an object is unlikely to be shared with others,
   * so it's not worth the attempt; or when a contiguous table is serialized
   * as multiple consecutive objects in reverse order, so it can't be shared. */
  objidx_t pop_pack (bool share=true)
  {
    object_t *obj = current;
    if (unlikely (!obj)) return 0;
    if (unlikely (in_error ())) return 0;

    current = current->next;
    obj->tail = head;
    obj->next = nullptr;
    unsigned len = obj->tail - obj->head;
    head = obj->head; /* Rewind head. */

    if (!len)
    {
      assert (!obj->links.length);
      return 0;
    }

    objidx_t objidx;
    if (share)
    {
      objidx = packed_map.get (obj);
      if (objidx)
      {
        obj->fini ();
        return objidx;
      }
    }

    tail -= len;
    memmove (tail, obj->head, len);

    obj->head = tail;
    obj->tail = tail + len;

    packed.push (obj);

    if (unlikely (!propagate_error (packed)))
    {
      /* Obj wasn't successfully added to packed, so clean it up; otherwise
       * its links will be leaked.  Once we use constructors/destructors
       * properly, we can remove these. */
      obj->fini ();
      return 0;
    }

    objidx = packed.length - 1;

    if (share) packed_map.set (obj, objidx);
    propagate_error (packed_map);

    return objidx;
  }
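  /* Object-stack sketch (hypothetical caller): a subtable is typically
   * built by pushing a fresh object, writing into it, and then either
   * packing it or discarding it:
   *
   *   c->push<SomeSubtable> ();        // hypothetical subtable type
   *   ...serialize the subtable's contents at c->head...
   *   if (ok) objidx_t idx = c->pop_pack ();  // move to tail; dedupe via packed_map
   *   else    c->pop_discard ();               // rewind head, drop the object
   *
   * With share=true, an object whose bytes and links match an already
   * packed object is dropped and the existing objidx is returned instead. */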
  void revert (snapshot_t snap)
  {
    // Overflows that happened after the snapshot will be erased by the revert.
    if (unlikely (in_error () && !only_overflow ())) return;
    assert (snap.current == current);
    current->links.shrink (snap.num_links);
    errors = snap.errors;
    revert (snap.head, snap.tail);
  }

  void revert (char *snap_head,
               char *snap_tail)
  {
    if (unlikely (in_error ())) return;
    assert (snap_head <= head);
    assert (tail <= snap_tail);
    head = snap_head;
    tail = snap_tail;
    discard_stale_objects ();
  }

  void discard_stale_objects ()
  {
    if (unlikely (in_error ())) return;
    while (packed.length > 1 &&
           packed.tail ()->head < tail)
    {
      packed_map.del (packed.tail ());
      assert (!packed.tail ()->next);
      packed.tail ()->fini ();
      packed.pop ();
    }
    if (packed.length > 1)
      assert (packed.tail ()->head == tail);
  }

  // Adds a virtual link from the current object to objidx. A virtual link is
  // not associated with an actual offset field; it is solely used to enforce
  // ordering constraints between objects. Adding a virtual link from object a
  // to object b ensures that object b is always packed after object a in the
  // final serialized order.
  //
  // This is useful in certain situations where there needs to be a specific
  // ordering in the final serialization, such as when platform bugs require
  // certain orderings, or to provide guidance to the repacker for better
  // offset overflow resolution.
  void add_virtual_link (objidx_t objidx)
  {
    if (unlikely (in_error ())) return;

    if (!objidx)
      return;

    assert (current);

    auto& link = *current->links.push ();
    if (current->links.in_error ())
      err (HB_SERIALIZE_ERROR_OTHER);

    link.width = 0;
    link.objidx = objidx;
    link.is_signed = 0;
    link.whence = 0;
    link.position = 0;
    link.bias = 0;
  }
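  /* Sketch (hypothetical objects): to force object b to be packed after
   * object a without emitting any offset field, add a virtual link while
   * a is the current object:
   *
   *   objidx_t b_idx = c->pop_pack ();  // b is already packed
   *   ...push and serialize a...
   *   c->add_virtual_link (b_idx);      // a now "depends" on b
   *
   * The zero width marks the link as virtual; resolve_links () skips it. */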
  template <typename T>
  void add_link (T &ofs, objidx_t objidx,
                 whence_t whence = Head,
                 unsigned bias = 0)
  {
    if (unlikely (in_error ())) return;

    if (!objidx)
      return;

    assert (current);
    assert (current->head <= (const char *) &ofs);

    auto& link = *current->links.push ();
    if (current->links.in_error ())
      err (HB_SERIALIZE_ERROR_OTHER);

    link.width = sizeof (T);
    link.objidx = objidx;
    if (unlikely (!sizeof (T)))
    {
      // This link is not associated with an actual offset and exists merely
      // to enforce an ordering constraint.
      link.is_signed = 0;
      link.whence = 0;
      link.position = 0;
      link.bias = 0;
      return;
    }

    link.is_signed = std::is_signed<hb_unwrap_type (T)>::value;
    link.whence = (unsigned) whence;
    link.position = (const char *) &ofs - current->head;
    link.bias = bias;
  }

  unsigned to_bias (const void *base) const
  {
    if (unlikely (in_error ())) return 0;
    if (!base) return 0;
    assert (current);
    assert (current->head <= (const char *) base);
    return (const char *) base - current->head;
  }
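  /* Sketch (hypothetical table layout): record that an offset field inside
   * the current object should point at a packed child, with the offset
   * measured from a base other than the object's head:
   *
   *   objidx_t child = c->pop_pack ();
   *   c->add_link (table->offsetField,   // hypothetical Offset16 field
   *                child, Head, c->to_bias (base));
   *
   * Nothing is written into the field yet; resolve_links () below fills
   * in the value once every object's final position is known. */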
  void resolve_links ()
  {
    if (unlikely (in_error ())) return;

    assert (!current);
    assert (packed.length > 1);

    for (const object_t* parent : ++hb_iter (packed))
      for (const object_t::link_t &link : parent->links)
      {
        if (unlikely (!link.width)) continue; // Don't need to resolve virtual offsets

        const object_t* child = packed[link.objidx];
        if (unlikely (!child)) { err (HB_SERIALIZE_ERROR_OTHER); return; }
        unsigned offset = 0;
        switch ((whence_t) link.whence) {
          case Head:     offset = child->head - parent->head; break;
          case Tail:     offset = child->head - parent->tail; break;
          case Absolute: offset = (head - start) + (child->head - tail); break;
        }

        assert (offset >= link.bias);
        offset -= link.bias;
        if (link.is_signed)
        {
          assert (link.width == 2 || link.width == 4);
          if (link.width == 4)
            assign_offset<int32_t> (parent, link, offset);
          else
            assign_offset<int16_t> (parent, link, offset);
        }
        else
        {
          assert (link.width == 2 || link.width == 3 || link.width == 4);
          if (link.width == 4)
            assign_offset<uint32_t> (parent, link, offset);
          else if (link.width == 3)
            assign_offset<uint32_t, 3> (parent, link, offset);
          else
            assign_offset<uint16_t> (parent, link, offset);
        }
      }
  }

  unsigned int length () const
  {
    if (unlikely (!current)) return 0;
    return this->head - current->head;
  }

  void align (unsigned int alignment)
  {
    unsigned int l = length () % alignment;
    if (l)
      allocate_size<void> (alignment - l);
  }

  template <typename Type = void>
  Type *start_embed (const Type *obj HB_UNUSED = nullptr) const
  { return reinterpret_cast<Type *> (this->head); }
  template <typename Type>
  Type *start_embed (const Type &obj) const
  { return start_embed (hb_addressof (obj)); }

  bool err (hb_serialize_error_t err_type)
  {
    return !bool ((errors = (errors | err_type)));
  }

  template <typename Type>
  Type *allocate_size (size_t size)
  {
    if (unlikely (in_error ())) return nullptr;

    if (unlikely (size > INT_MAX || this->tail - this->head < ptrdiff_t (size)))
    {
      err (HB_SERIALIZE_ERROR_OUT_OF_ROOM);
      return nullptr;
    }
    hb_memset (this->head, 0, size);
    char *ret = this->head;
    this->head += size;
    return reinterpret_cast<Type *> (ret);
  }

  template <typename Type>
  Type *allocate_min ()
  { return this->allocate_size<Type> (Type::min_size); }

  template <typename Type>
  Type *embed (const Type *obj)
  {
    unsigned int size = obj->get_size ();
    Type *ret = this->allocate_size<Type> (size);
    if (unlikely (!ret)) return nullptr;
    memcpy (ret, obj, size);
    return ret;
  }
  template <typename Type>
  Type *embed (const Type &obj)
  { return embed (hb_addressof (obj)); }

  template <typename Type, typename ...Ts> auto
  _copy (const Type &src, hb_priority<1>, Ts&&... ds) HB_RETURN
  (Type *, src.copy (this, std::forward<Ts> (ds)...))

  template <typename Type> auto
  _copy (const Type &src, hb_priority<0>) -> decltype (&(hb_declval<Type> () = src))
  {
    Type *ret = this->allocate_size<Type> (sizeof (Type));
    if (unlikely (!ret)) return nullptr;
    *ret = src;
    return ret;
  }

  /* Like embed, but active: calls obj.operator=() or obj.copy() to transfer data
   * instead of memcpy(). */
  template <typename Type, typename ...Ts>
  Type *copy (const Type &src, Ts&&... ds)
  { return _copy (src, hb_prioritize, std::forward<Ts> (ds)...); }
  template <typename Type, typename ...Ts>
  Type *copy (const Type *src, Ts&&... ds)
  { return copy (*src, std::forward<Ts> (ds)...); }

  template<typename Iterator,
           hb_requires (hb_is_iterator (Iterator)),
           typename ...Ts>
  void copy_all (Iterator it, Ts&&... ds)
  { for (decltype (*it) _ : it) copy (_, std::forward<Ts> (ds)...); }

  template <typename Type>
  hb_serialize_context_t& operator << (const Type &obj) & { embed (obj); return *this; }

  template <typename Type>
  Type *extend_size (Type *obj, size_t size)
  {
    if (unlikely (in_error ())) return nullptr;

    assert (this->start <= (char *) obj);
    assert ((char *) obj <= this->head);
    assert ((size_t) (this->head - (char *) obj) <= size);
    if (unlikely (((char *) obj + size < (char *) obj) ||
                  !this->allocate_size<Type> (((char *) obj) + size - this->head))) return nullptr;
    return reinterpret_cast<Type *> (obj);
  }
  template <typename Type>
  Type *extend_size (Type &obj, size_t size)
  { return extend_size (hb_addressof (obj), size); }

  template <typename Type>
  Type *extend_min (Type *obj) { return extend_size (obj, obj->min_size); }
  template <typename Type>
  Type *extend_min (Type &obj) { return extend_min (hb_addressof (obj)); }

  template <typename Type, typename ...Ts>
  Type *extend (Type *obj, Ts&&... ds)
  { return extend_size (obj, obj->get_size (std::forward<Ts> (ds)...)); }
  template <typename Type, typename ...Ts>
  Type *extend (Type &obj, Ts&&... ds)
  { return extend (hb_addressof (obj), std::forward<Ts> (ds)...); }

  /* Output routines. */
  hb_bytes_t copy_bytes () const
  {
    assert (successful ());
    /* Copy both items from head side and tail side... */
    unsigned int len = (this->head - this->start)
                     + (this->end - this->tail);

    // If len is zero don't hb_malloc as the memory won't get properly
    // cleaned up later.
    if (!len) return hb_bytes_t ();

    char *p = (char *) hb_malloc (len);
    if (unlikely (!p)) return hb_bytes_t ();

    memcpy (p, this->start, this->head - this->start);
    memcpy (p + (this->head - this->start), this->tail, this->end - this->tail);
    return hb_bytes_t (p, len);
  }
  template <typename Type>
  Type *copy () const
  { return reinterpret_cast<Type *> ((char *) copy_bytes ().arrayZ); }
  hb_blob_t *copy_blob () const
  {
    hb_bytes_t b = copy_bytes ();
    return hb_blob_create (b.arrayZ, b.length,
                           HB_MEMORY_MODE_WRITABLE,
                           (char *) b.arrayZ, hb_free);
  }
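  /* End-to-end sketch (hypothetical driver): the buffer is written from
   * both ends (fresh data at head, packed objects at tail), so the result
   * is only contiguous once copy_bytes ()/copy_blob () stitch the two
   * halves together:
   *
   *   hb_serialize_context_t c (buf, buf_size);
   *   c.start_serialize<SomeTable> ();  // hypothetical root table type
   *   ...serialize, push/pop_pack subtables, add_link offsets...
   *   c.end_serialize ();
   *   hb_blob_t *blob = c.successful () ? c.copy_blob () : nullptr;
   */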
  const hb_vector_t<object_t *>& object_graph () const
  { return packed; }

  private:
  template <typename T, unsigned Size = sizeof (T)>
  void assign_offset (const object_t* parent, const object_t::link_t &link, unsigned offset)
  {
    auto &off = * ((BEInt<T, Size> *) (parent->head + link.position));
    assert (0 == off);
    check_assign (off, offset, HB_SERIALIZE_ERROR_OFFSET_OVERFLOW);
  }

  public: /* TODO Make private. */
  char *start, *head, *tail, *end;
  unsigned int debug_depth;
  hb_serialize_error_t errors;

  private:

  /* Object memory pool. */
  hb_pool_t<object_t> object_pool;

  /* Stack of currently under construction objects. */
  object_t *current;

  /* Stack of packed objects.  Object 0 is always nil object. */
  hb_vector_t<object_t *> packed;

  /* Map view of packed objects. */
  hb_hashmap_t<const object_t *, objidx_t,
               const object_t *, objidx_t,
               nullptr, 0> packed_map;
};

#endif /* HB_SERIALIZE_HH */