// Copyright 2019 The Abseil Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#ifndef ABSL_CONTAINER_INTERNAL_INLINED_VECTOR_H_
#define ABSL_CONTAINER_INTERNAL_INLINED_VECTOR_H_

#include <algorithm>
#include <cstddef>
#include <cstring>
#include <iterator>
#include <limits>
#include <memory>
#include <new>
#include <type_traits>
#include <utility>

#include "third_party/abseil-cpp/absl/base/attributes.h"
#include "third_party/abseil-cpp/absl/base/config.h"
#include "third_party/abseil-cpp/absl/base/internal/identity.h"
#include "third_party/abseil-cpp/absl/base/macros.h"
#include "third_party/abseil-cpp/absl/container/internal/compressed_tuple.h"
#include "third_party/abseil-cpp/absl/memory/memory.h"
#include "third_party/abseil-cpp/absl/meta/type_traits.h"
#include "third_party/abseil-cpp/absl/types/span.h"
#include "third_party/tcmalloc/malloc_extension.h"  // absl:google3-only(using size-returning new internally)

namespace absl {
ABSL_NAMESPACE_BEGIN
namespace inlined_vector_internal {

// GCC does not deal very well with the below code.
#if !defined(__clang__) && defined(__GNUC__)
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Warray-bounds"
#endif

template <typename A>
using AllocatorTraits = std::allocator_traits<A>;
template <typename A>
using ValueType = typename AllocatorTraits<A>::value_type;
template <typename A>
using SizeType = typename AllocatorTraits<A>::size_type;
template <typename A>
using Pointer = typename AllocatorTraits<A>::pointer;
template <typename A>
using ConstPointer = typename AllocatorTraits<A>::const_pointer;
template <typename A>
using DifferenceType = typename AllocatorTraits<A>::difference_type;
template <typename A>
using Reference = ValueType<A>&;
template <typename A>
using ConstReference = const ValueType<A>&;
template <typename A>
using Iterator = Pointer<A>;
template <typename A>
using ConstIterator = ConstPointer<A>;
template <typename A>
using ReverseIterator = typename std::reverse_iterator<Iterator<A>>;
template <typename A>
using ConstReverseIterator = typename std::reverse_iterator<ConstIterator<A>>;
template <typename A>
using MoveIterator = typename std::move_iterator<Iterator<A>>;

template <typename Iterator>
using IsAtLeastForwardIterator = std::is_convertible<
    typename std::iterator_traits<Iterator>::iterator_category,
    std::forward_iterator_tag>;

template <typename A>
using IsMoveAssignOk = std::is_move_assignable<ValueType<A>>;
template <typename A>
using IsSwapOk = absl::type_traits_internal::IsSwappable<ValueType<A>>;

template <typename A,
          bool IsTriviallyDestructible =
              absl::is_trivially_destructible<ValueType<A>>::value &&
              std::is_same<A, std::allocator<ValueType<A>>>::value>
struct DestroyAdapter;

template <typename A>
struct DestroyAdapter<A, /* IsTriviallyDestructible */ false> {
  static void DestroyElements(A& allocator, Pointer<A> destroy_first,
                              SizeType<A> destroy_size) {
    for (SizeType<A> i = destroy_size; i != 0;) {
      --i;
      AllocatorTraits<A>::destroy(allocator, destroy_first + i);
    }
  }
};

template <typename A>
struct DestroyAdapter<A, /* IsTriviallyDestructible */ true> {
  static void DestroyElements(A& allocator, Pointer<A> destroy_first,
                              SizeType<A> destroy_size) {
    static_cast<void>(allocator);
    static_cast<void>(destroy_first);
    static_cast<void>(destroy_size);
  }
};

template <typename A>
struct Allocation {
  Pointer<A> data = nullptr;
  SizeType<A> capacity = 0;
};

template <typename A,
          bool IsOverAligned =
              (alignof(ValueType<A>) > ABSL_INTERNAL_DEFAULT_NEW_ALIGNMENT)>
struct MallocAdapter {
  static Allocation<A> Allocate(A& allocator, SizeType<A> requested_capacity) {
    return {AllocatorTraits<A>::allocate(allocator, requested_capacity),
            requested_capacity};
  }

  static void Deallocate(A& allocator, Pointer<A> pointer,
                         SizeType<A> capacity) {
    AllocatorTraits<A>::deallocate(allocator, pointer, capacity);
  }
};

// absl:google3-begin(using size-returning new internally)
template <typename T>
struct MallocAdapter<std::allocator<T>, /* IsOverAligned = */ false> {
  using A = std::allocator<T>;

  ABSL_ATTRIBUTE_NO_SANITIZE_CFI static Allocation<A> Allocate(
      A& allocator, SizeType<A> requested_capacity) {
    ABSL_HARDENING_ASSERT(requested_capacity <=
                          AllocatorTraits<A>::max_size(allocator));
    SizeType<A> requested_byte_count =
        requested_capacity * sizeof(ValueType<A>);

    tcmalloc::sized_ptr_t result = __size_returning_new(requested_byte_count);

    // ABSL_ATTRIBUTE_NO_SANITIZE_CFI is required because objects are not
    // constructed at the time of this cast.
    Pointer<A> data = reinterpret_cast<Pointer<A>>(result.p);
    SizeType<A> capacity = result.n / sizeof(ValueType<A>);
    return {data, capacity};
  }

  static void Deallocate(A& /* allocator */, Pointer<A> pointer,
                         SizeType<A> capacity) {
    SizeType<A> byte_count = capacity * sizeof(ValueType<A>);

#if defined(__cpp_sized_deallocation)
    ::operator delete(pointer, byte_count);
#else   // defined(__cpp_sized_deallocation)
    ::operator delete(pointer);
#endif  // defined(__cpp_sized_deallocation)

    static_cast<void>(byte_count);
  }
};
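
// Illustrative note (the specific rounding below is an assumption about
// tcmalloc's size classes, not a guarantee): requesting capacity for 5
// 4-byte ints asks `__size_returning_new` for 20 bytes. If the allocator
// hands back a 24-byte block, `result.n / sizeof(ValueType<A>)` is 6, so
// the vector gains one element of capacity at no extra cost.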

#if defined(__cpp_aligned_new)
template <typename T>
struct MallocAdapter<std::allocator<T>, /* IsOverAligned = */ true> {
  using A = std::allocator<T>;

  ABSL_ATTRIBUTE_NO_SANITIZE_CFI static Allocation<A> Allocate(
      A& allocator, SizeType<A> requested_capacity) {
    ABSL_HARDENING_ASSERT(requested_capacity <=
                          AllocatorTraits<A>::max_size(allocator));
    SizeType<A> requested_byte_count =
        requested_capacity * sizeof(ValueType<A>);

    tcmalloc::sized_ptr_t result = __size_returning_new_aligned(
        requested_byte_count, std::align_val_t(alignof(ValueType<A>)));

    // ABSL_ATTRIBUTE_NO_SANITIZE_CFI is required because objects are not
    // constructed at the time of this cast.
    Pointer<A> data = reinterpret_cast<Pointer<A>>(result.p);
    SizeType<A> capacity = result.n / sizeof(ValueType<A>);
    return {data, capacity};
  }

  static void Deallocate(A& /* allocator */, Pointer<A> pointer,
                         SizeType<A> capacity) {
    SizeType<A> byte_count = capacity * sizeof(ValueType<A>);

#if defined(__cpp_sized_deallocation)
    ::operator delete(pointer, byte_count,
                      std::align_val_t(alignof(ValueType<A>)));
#else   // defined(__cpp_sized_deallocation)
    ::operator delete(pointer, std::align_val_t(alignof(ValueType<A>)));
#endif  // defined(__cpp_sized_deallocation)

    static_cast<void>(byte_count);
  }
};
#endif  // defined(__cpp_aligned_new)
// absl:google3-end

template <typename A, typename ValueAdapter>
void ConstructElements(absl::internal::type_identity_t<A>& allocator,
                       Pointer<A> construct_first, ValueAdapter& values,
                       SizeType<A> construct_size) {
  for (SizeType<A> i = 0; i < construct_size; ++i) {
    ABSL_INTERNAL_TRY { values.ConstructNext(allocator, construct_first + i); }
    ABSL_INTERNAL_CATCH_ANY {
      DestroyAdapter<A>::DestroyElements(allocator, construct_first, i);
      ABSL_INTERNAL_RETHROW;
    }
  }
}

template <typename A, typename ValueAdapter>
void AssignElements(Pointer<A> assign_first, ValueAdapter& values,
                    SizeType<A> assign_size) {
  for (SizeType<A> i = 0; i < assign_size; ++i) {
    values.AssignNext(assign_first + i);
  }
}

template <typename A>
struct StorageView {
  Pointer<A> data;
  SizeType<A> size;
  SizeType<A> capacity;
};

template <typename A, typename Iterator>
class IteratorValueAdapter {
 public:
  explicit IteratorValueAdapter(const Iterator& it) : it_(it) {}

  void ConstructNext(A& allocator, Pointer<A> construct_at) {
    AllocatorTraits<A>::construct(allocator, construct_at, *it_);
    ++it_;
  }

  void AssignNext(Pointer<A> assign_at) {
    *assign_at = *it_;
    ++it_;
  }

 private:
  Iterator it_;
};

template <typename A>
class CopyValueAdapter {
 public:
  explicit CopyValueAdapter(ConstPointer<A> p) : ptr_(p) {}

  void ConstructNext(A& allocator, Pointer<A> construct_at) {
    AllocatorTraits<A>::construct(allocator, construct_at, *ptr_);
  }

  void AssignNext(Pointer<A> assign_at) { *assign_at = *ptr_; }

 private:
  ConstPointer<A> ptr_;
};

template <typename A>
class DefaultValueAdapter {
 public:
  explicit DefaultValueAdapter() {}

  void ConstructNext(A& allocator, Pointer<A> construct_at) {
    AllocatorTraits<A>::construct(allocator, construct_at);
  }

  void AssignNext(Pointer<A> assign_at) { *assign_at = ValueType<A>(); }
};
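
// The three adapters above share an informal protocol: `ConstructNext`
// placement-constructs a single element at the given address (advancing any
// internal state), while `AssignNext` assigns into an already-constructed
// element. `ConstructElements` and `AssignElements` can therefore drive any
// of them without knowing which concrete adapter they were handed.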

template <typename A>
class AllocationTransaction {
 public:
  explicit AllocationTransaction(A& allocator)
      : allocator_data_(allocator, nullptr), capacity_(0) {}

  ~AllocationTransaction() {
    if (DidAllocate()) {
      MallocAdapter<A>::Deallocate(GetAllocator(), GetData(), GetCapacity());
    }
  }

  AllocationTransaction(const AllocationTransaction&) = delete;
  void operator=(const AllocationTransaction&) = delete;

  A& GetAllocator() { return allocator_data_.template get<0>(); }
  Pointer<A>& GetData() { return allocator_data_.template get<1>(); }
  SizeType<A>& GetCapacity() { return capacity_; }

  bool DidAllocate() { return GetData() != nullptr; }

  Pointer<A> Allocate(SizeType<A> requested_capacity) {
    Allocation<A> result =
        MallocAdapter<A>::Allocate(GetAllocator(), requested_capacity);
    GetData() = result.data;
    GetCapacity() = result.capacity;
    return result.data;
  }

  ABSL_MUST_USE_RESULT Allocation<A> Release() && {
    Allocation<A> result = {GetData(), GetCapacity()};
    Reset();
    return result;
  }

 private:
  void Reset() {
    GetData() = nullptr;
    GetCapacity() = 0;
  }

  container_internal::CompressedTuple<A, Pointer<A>> allocator_data_;
  SizeType<A> capacity_;
};

template <typename A>
class ConstructionTransaction {
 public:
  explicit ConstructionTransaction(A& allocator)
      : allocator_data_(allocator, nullptr), size_(0) {}

  ~ConstructionTransaction() {
    if (DidConstruct()) {
      DestroyAdapter<A>::DestroyElements(GetAllocator(), GetData(), GetSize());
    }
  }

  ConstructionTransaction(const ConstructionTransaction&) = delete;
  void operator=(const ConstructionTransaction&) = delete;

  A& GetAllocator() { return allocator_data_.template get<0>(); }
  Pointer<A>& GetData() { return allocator_data_.template get<1>(); }
  SizeType<A>& GetSize() { return size_; }

  bool DidConstruct() { return GetData() != nullptr; }
  template <typename ValueAdapter>
  void Construct(Pointer<A> data, ValueAdapter& values, SizeType<A> size) {
    ConstructElements<A>(GetAllocator(), data, values, size);
    GetData() = data;
    GetSize() = size;
  }
  void Commit() && {
    GetData() = nullptr;
    GetSize() = 0;
  }

 private:
  container_internal::CompressedTuple<A, Pointer<A>> allocator_data_;
  SizeType<A> size_;
};

template <typename T, size_t N, typename A>
class Storage {
 public:
  struct MemcpyPolicy {};
  struct ElementwiseAssignPolicy {};
  struct ElementwiseSwapPolicy {};
  struct ElementwiseConstructPolicy {};

  using MoveAssignmentPolicy = absl::conditional_t<
      // Fast path: if the value type can be trivially move assigned and
      // destroyed, and we know the allocator doesn't do anything fancy, then
      // it's safe for us to simply adopt the contents of the storage for
      // `other` and remove its own reference to them. It's as if we had
      // individually move-assigned each value and then destroyed the original.
      absl::conjunction<absl::is_trivially_move_assignable<ValueType<A>>,
                        absl::is_trivially_destructible<ValueType<A>>,
                        std::is_same<A, std::allocator<ValueType<A>>>>::value,
      MemcpyPolicy,
      // Otherwise we use move assignment if possible. If not, we simulate
      // move assignment using move construction.
      //
      // Note that this is in contrast to e.g. std::vector and std::optional,
      // which are themselves not move-assignable when their contained type is
      // not.
      absl::conditional_t<IsMoveAssignOk<A>::value, ElementwiseAssignPolicy,
                          ElementwiseConstructPolicy>>;

  // The policy to be used specifically when swapping inlined elements.
  using SwapInlinedElementsPolicy = absl::conditional_t<
      // Fast path: if the value type can be trivially relocated, and we
      // know the allocator doesn't do anything fancy, then it's safe for us
      // to simply swap the bytes in the inline storage. It's as if we had
      // relocated the first vector's elements into temporary storage,
      // relocated the second's elements into the (now-empty) first's,
      // and then relocated from temporary storage into the second.
      absl::conjunction<absl::is_trivially_relocatable<ValueType<A>>,
                        std::is_same<A, std::allocator<ValueType<A>>>>::value,
      MemcpyPolicy,
      absl::conditional_t<IsSwapOk<A>::value, ElementwiseSwapPolicy,
                          ElementwiseConstructPolicy>>;
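
  // For example (illustrative): with `A = std::allocator<int>`, both aliases
  // resolve to `MemcpyPolicy`. For a value type with a user-provided move
  // assignment operator and destructor (and no trivial-relocatability
  // annotation), move assignment falls back to `ElementwiseAssignPolicy` and
  // inlined swaps to `ElementwiseSwapPolicy`.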

  static SizeType<A> NextCapacity(SizeType<A> current_capacity) {
    return current_capacity * 2;
  }

  static SizeType<A> ComputeCapacity(SizeType<A> current_capacity,
                                     SizeType<A> requested_capacity) {
    return (std::max)(NextCapacity(current_capacity), requested_capacity);
  }
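
  // For example: growing from capacity 4, `NextCapacity` yields 8, so a
  // request for 5 elements allocates 8 while a request for 10 allocates 10;
  // the larger of doubling and the explicit request wins.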

  // ---------------------------------------------------------------------------
  // Storage Constructors and Destructor
  // ---------------------------------------------------------------------------

  Storage() : metadata_(A(), /* size and is_allocated */ 0u) {}

  explicit Storage(const A& allocator)
      : metadata_(allocator, /* size and is_allocated */ 0u) {}

  ~Storage() {
    // Fast path: if we are empty and not allocated, there's nothing to do.
    if (GetSizeAndIsAllocated() == 0) {
      return;
    }

    // Fast path: if no destructors need to be run and we know the allocator
    // doesn't do anything fancy, then all we need to do is deallocate (and
    // maybe not even that).
    if (absl::is_trivially_destructible<ValueType<A>>::value &&
        std::is_same<A, std::allocator<ValueType<A>>>::value) {
      DeallocateIfAllocated();
      return;
    }

    DestroyContents();
  }

  // ---------------------------------------------------------------------------
  // Storage Member Accessors
  // ---------------------------------------------------------------------------

  SizeType<A>& GetSizeAndIsAllocated() { return metadata_.template get<1>(); }

  const SizeType<A>& GetSizeAndIsAllocated() const {
    return metadata_.template get<1>();
  }

  SizeType<A> GetSize() const { return GetSizeAndIsAllocated() >> 1; }

  bool GetIsAllocated() const { return GetSizeAndIsAllocated() & 1; }

  Pointer<A> GetAllocatedData() {
    // GCC 12 has a false-positive -Wmaybe-uninitialized warning here.
#if ABSL_INTERNAL_HAVE_MIN_GNUC_VERSION(12, 0)
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wmaybe-uninitialized"
#endif
    return data_.allocated.allocated_data;
#if ABSL_INTERNAL_HAVE_MIN_GNUC_VERSION(12, 0)
#pragma GCC diagnostic pop
#endif
  }

  ConstPointer<A> GetAllocatedData() const {
    return data_.allocated.allocated_data;
  }

  // ABSL_ATTRIBUTE_NO_SANITIZE_CFI is used because the memory pointed to may
  // be uninitialized, a common pattern in allocate()+construct() APIs.
  // https://clang.llvm.org/docs/ControlFlowIntegrity.html#bad-cast-checking
  // NOTE: When this was written, LLVM documentation did not explicitly
  // mention that casting `char*` and using `reinterpret_cast` qualifies
  // as a bad cast.
  ABSL_ATTRIBUTE_NO_SANITIZE_CFI Pointer<A> GetInlinedData() {
    return reinterpret_cast<Pointer<A>>(data_.inlined.inlined_data);
  }

  ABSL_ATTRIBUTE_NO_SANITIZE_CFI ConstPointer<A> GetInlinedData() const {
    return reinterpret_cast<ConstPointer<A>>(data_.inlined.inlined_data);
  }

  SizeType<A> GetAllocatedCapacity() const {
    return data_.allocated.allocated_capacity;
  }

  SizeType<A> GetInlinedCapacity() const {
    return static_cast<SizeType<A>>(kOptimalInlinedSize);
  }

  StorageView<A> MakeStorageView() {
    return GetIsAllocated() ? StorageView<A>{GetAllocatedData(), GetSize(),
                                             GetAllocatedCapacity()}
                            : StorageView<A>{GetInlinedData(), GetSize(),
                                             GetInlinedCapacity()};
  }

  A& GetAllocator() { return metadata_.template get<0>(); }

  const A& GetAllocator() const { return metadata_.template get<0>(); }

  // ---------------------------------------------------------------------------
  // Storage Member Mutators
  // ---------------------------------------------------------------------------

  ABSL_ATTRIBUTE_NOINLINE void InitFrom(const Storage& other);

  template <typename ValueAdapter>
  void Initialize(ValueAdapter values, SizeType<A> new_size);

  template <typename ValueAdapter>
  void Assign(ValueAdapter values, SizeType<A> new_size);

  template <typename ValueAdapter>
  void Resize(ValueAdapter values, SizeType<A> new_size);

  template <typename ValueAdapter>
  Iterator<A> Insert(ConstIterator<A> pos, ValueAdapter values,
                     SizeType<A> insert_count);

  template <typename... Args>
  Reference<A> EmplaceBack(Args&&... args);

  Iterator<A> Erase(ConstIterator<A> from, ConstIterator<A> to);

  void Reserve(SizeType<A> requested_capacity);

  void ShrinkToFit();

  void Swap(Storage* other_storage_ptr);

  void SetIsAllocated() {
    GetSizeAndIsAllocated() |= static_cast<SizeType<A>>(1);
  }

  void UnsetIsAllocated() {
    GetSizeAndIsAllocated() &= ((std::numeric_limits<SizeType<A>>::max)() - 1);
  }

  void SetSize(SizeType<A> size) {
    GetSizeAndIsAllocated() =
        (size << 1) | static_cast<SizeType<A>>(GetIsAllocated());
  }

  void SetAllocatedSize(SizeType<A> size) {
    GetSizeAndIsAllocated() = (size << 1) | static_cast<SizeType<A>>(1);
  }

  void SetInlinedSize(SizeType<A> size) {
    GetSizeAndIsAllocated() = size << static_cast<SizeType<A>>(1);
  }

  void AddSize(SizeType<A> count) {
    GetSizeAndIsAllocated() += count << static_cast<SizeType<A>>(1);
  }

  void SubtractSize(SizeType<A> count) {
    ABSL_HARDENING_ASSERT(count <= GetSize());

    GetSizeAndIsAllocated() -= count << static_cast<SizeType<A>>(1);
  }
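
  // The least significant bit of `GetSizeAndIsAllocated()` holds the
  // is-allocated flag and the remaining bits hold the size. For example, an
  // allocated vector of size 3 stores (3 << 1) | 1 == 7, while an inlined
  // vector of size 3 stores 3 << 1 == 6.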

  void SetAllocation(Allocation<A> allocation) {
    data_.allocated.allocated_data = allocation.data;
    data_.allocated.allocated_capacity = allocation.capacity;
  }

  void MemcpyFrom(const Storage& other_storage) {
    // Assumption check: it doesn't make sense to memcpy inlined elements
    // unless we know the allocator doesn't do anything fancy, and one of the
    // following holds:
    //
    //  * The elements are trivially relocatable.
    //
    //  * It's possible to trivially assign the elements and then destroy the
    //    source.
    //
    //  * It's possible to trivially copy construct/assign the elements.
    //
    {
      using V = ValueType<A>;
      ABSL_HARDENING_ASSERT(
          other_storage.GetIsAllocated() ||
          (std::is_same<A, std::allocator<V>>::value &&
           (
               // First case above
               absl::is_trivially_relocatable<V>::value ||
               // Second case above
               (absl::is_trivially_move_assignable<V>::value &&
                absl::is_trivially_destructible<V>::value) ||
               // Third case above
               (absl::is_trivially_copy_constructible<V>::value &&
                absl::is_trivially_copy_assignable<V>::value))));
    }

    GetSizeAndIsAllocated() = other_storage.GetSizeAndIsAllocated();
    data_ = other_storage.data_;
  }

  void DeallocateIfAllocated() {
    if (GetIsAllocated()) {
      MallocAdapter<A>::Deallocate(GetAllocator(), GetAllocatedData(),
                                   GetAllocatedCapacity());
    }
  }

 private:
  ABSL_ATTRIBUTE_NOINLINE void DestroyContents();

  using Metadata = container_internal::CompressedTuple<A, SizeType<A>>;

  struct Allocated {
    Pointer<A> allocated_data;
    SizeType<A> allocated_capacity;
  };

  // `kOptimalInlinedSize` is an automatically adjusted inlined capacity of
  // the `InlinedVector`. Sometimes, it is possible to increase the capacity
  // (from the user requested `N`) without increasing the size of the
  // `InlinedVector`.
  static constexpr size_t kOptimalInlinedSize =
      (std::max)(N, sizeof(Allocated) / sizeof(ValueType<A>));
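
  // For example (assuming a 64-bit platform where `Allocated` is 16 bytes):
  // for `T = char` with `N = 4`, kOptimalInlinedSize is 16, because 16
  // one-byte elements fit in the space the heap pointer and capacity occupy
  // anyway; for `T = int64_t` with `N = 4`, the requested 4 is kept.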

  struct Inlined {
    alignas(ValueType<A>) char inlined_data[sizeof(
        ValueType<A>[kOptimalInlinedSize])];
  };

  union Data {
    Allocated allocated;
    Inlined inlined;
  };

  void SwapN(ElementwiseSwapPolicy, Storage* other, SizeType<A> n);
  void SwapN(ElementwiseConstructPolicy, Storage* other, SizeType<A> n);

  void SwapInlinedElements(MemcpyPolicy, Storage* other);
  template <typename NotMemcpyPolicy>
  void SwapInlinedElements(NotMemcpyPolicy, Storage* other);

  template <typename... Args>
  ABSL_ATTRIBUTE_NOINLINE Reference<A> EmplaceBackSlow(Args&&... args);

  Metadata metadata_;
  Data data_;
};

template <typename T, size_t N, typename A>
void Storage<T, N, A>::DestroyContents() {
  Pointer<A> data = GetIsAllocated() ? GetAllocatedData() : GetInlinedData();
  DestroyAdapter<A>::DestroyElements(GetAllocator(), data, GetSize());
  DeallocateIfAllocated();
}

template <typename T, size_t N, typename A>
void Storage<T, N, A>::InitFrom(const Storage& other) {
  const SizeType<A> n = other.GetSize();
  ABSL_HARDENING_ASSERT(n > 0);  // Empty sources are handled in the caller.
  ConstPointer<A> src;
  Pointer<A> dst;
  if (!other.GetIsAllocated()) {
    dst = GetInlinedData();
    src = other.GetInlinedData();
  } else {
    // Because this is only called from the `InlinedVector` constructors, it's
    // safe to take on the allocation with size `0`. If `ConstructElements(...)`
    // throws, deallocation will be automatically handled by `~Storage()`.
    SizeType<A> requested_capacity = ComputeCapacity(GetInlinedCapacity(), n);
    Allocation<A> allocation =
        MallocAdapter<A>::Allocate(GetAllocator(), requested_capacity);
    SetAllocation(allocation);
    dst = allocation.data;
    src = other.GetAllocatedData();
  }

  // Fast path: if the value type is trivially copy constructible and we know
  // the allocator doesn't do anything fancy, then we know it is legal for us
  // to simply memcpy the other vector's elements.
  if (absl::is_trivially_copy_constructible<ValueType<A>>::value &&
      std::is_same<A, std::allocator<ValueType<A>>>::value) {
    std::memcpy(reinterpret_cast<char*>(dst),
                reinterpret_cast<const char*>(src), n * sizeof(ValueType<A>));
  } else {
    auto values = IteratorValueAdapter<A, ConstPointer<A>>(src);
    ConstructElements<A>(GetAllocator(), dst, values, n);
  }

  GetSizeAndIsAllocated() = other.GetSizeAndIsAllocated();
}

template <typename T, size_t N, typename A>
template <typename ValueAdapter>
auto Storage<T, N, A>::Initialize(ValueAdapter values,
                                  SizeType<A> new_size) -> void {
  // Only callable from constructors!
  ABSL_HARDENING_ASSERT(!GetIsAllocated());
  ABSL_HARDENING_ASSERT(GetSize() == 0);

  Pointer<A> construct_data;
  if (new_size > GetInlinedCapacity()) {
    // Because this is only called from the `InlinedVector` constructors, it's
    // safe to take on the allocation with size `0`. If `ConstructElements(...)`
    // throws, deallocation will be automatically handled by `~Storage()`.
    SizeType<A> requested_capacity =
        ComputeCapacity(GetInlinedCapacity(), new_size);
    Allocation<A> allocation =
        MallocAdapter<A>::Allocate(GetAllocator(), requested_capacity);
    construct_data = allocation.data;
    SetAllocation(allocation);
    SetIsAllocated();
  } else {
    construct_data = GetInlinedData();
  }

  ConstructElements<A>(GetAllocator(), construct_data, values, new_size);

  // Since the initial size was guaranteed to be `0` and the allocated bit is
  // already correct for either case, *adding* `new_size` gives us the correct
  // result faster than setting it directly.
  AddSize(new_size);
}

template <typename T, size_t N, typename A>
template <typename ValueAdapter>
auto Storage<T, N, A>::Assign(ValueAdapter values,
                              SizeType<A> new_size) -> void {
  StorageView<A> storage_view = MakeStorageView();

  AllocationTransaction<A> allocation_tx(GetAllocator());

  absl::Span<ValueType<A>> assign_loop;
  absl::Span<ValueType<A>> construct_loop;
  absl::Span<ValueType<A>> destroy_loop;

  if (new_size > storage_view.capacity) {
    SizeType<A> requested_capacity =
        ComputeCapacity(storage_view.capacity, new_size);
    construct_loop = {allocation_tx.Allocate(requested_capacity), new_size};
    destroy_loop = {storage_view.data, storage_view.size};
  } else if (new_size > storage_view.size) {
    assign_loop = {storage_view.data, storage_view.size};
    construct_loop = {storage_view.data + storage_view.size,
                      new_size - storage_view.size};
  } else {
    assign_loop = {storage_view.data, new_size};
    destroy_loop = {storage_view.data + new_size, storage_view.size - new_size};
  }

  AssignElements<A>(assign_loop.data(), values, assign_loop.size());

  ConstructElements<A>(GetAllocator(), construct_loop.data(), values,
                       construct_loop.size());

  DestroyAdapter<A>::DestroyElements(GetAllocator(), destroy_loop.data(),
                                     destroy_loop.size());

  if (allocation_tx.DidAllocate()) {
    DeallocateIfAllocated();
    SetAllocation(std::move(allocation_tx).Release());
    SetIsAllocated();
  }

  SetSize(new_size);
}
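
// For example: assigning 3 values into a vector of size 5 (capacity >= 5)
// takes the final branch above: elements [0, 3) are assigned over and
// elements [3, 5) are destroyed. Assigning 7 values into that vector with
// capacity 8 assigns over [0, 5) and constructs [5, 7); a fresh allocation
// is made only when the new size exceeds the capacity.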

template <typename T, size_t N, typename A>
template <typename ValueAdapter>
auto Storage<T, N, A>::Resize(ValueAdapter values,
                              SizeType<A> new_size) -> void {
  StorageView<A> storage_view = MakeStorageView();
  Pointer<A> const base = storage_view.data;
  const SizeType<A> size = storage_view.size;
  A& alloc = GetAllocator();
  if (new_size <= size) {
    // Destroy extra old elements.
    DestroyAdapter<A>::DestroyElements(alloc, base + new_size, size - new_size);
  } else if (new_size <= storage_view.capacity) {
    // Construct new elements in place.
    ConstructElements<A>(alloc, base + size, values, new_size - size);
  } else {
    // Steps:
    //  a. Allocate new backing store.
    //  b. Construct new elements in new backing store.
    //  c. Move existing elements from old backing store to new backing store.
    //  d. Destroy all elements in old backing store.
    // Use transactional wrappers for the first two steps so we can roll
    // back if necessary due to exceptions.
    AllocationTransaction<A> allocation_tx(alloc);
    SizeType<A> requested_capacity =
        ComputeCapacity(storage_view.capacity, new_size);
    Pointer<A> new_data = allocation_tx.Allocate(requested_capacity);

    ConstructionTransaction<A> construction_tx(alloc);
    construction_tx.Construct(new_data + size, values, new_size - size);

    IteratorValueAdapter<A, MoveIterator<A>> move_values(
        (MoveIterator<A>(base)));
    ConstructElements<A>(alloc, new_data, move_values, size);

    DestroyAdapter<A>::DestroyElements(alloc, base, size);
    std::move(construction_tx).Commit();
    DeallocateIfAllocated();
    SetAllocation(std::move(allocation_tx).Release());
    SetIsAllocated();
  }
  SetSize(new_size);
}

template <typename T, size_t N, typename A>
template <typename ValueAdapter>
auto Storage<T, N, A>::Insert(ConstIterator<A> pos, ValueAdapter values,
                              SizeType<A> insert_count) -> Iterator<A> {
  StorageView<A> storage_view = MakeStorageView();

  auto insert_index = static_cast<SizeType<A>>(
      std::distance(ConstIterator<A>(storage_view.data), pos));
  SizeType<A> insert_end_index = insert_index + insert_count;
  SizeType<A> new_size = storage_view.size + insert_count;

  if (new_size > storage_view.capacity) {
    AllocationTransaction<A> allocation_tx(GetAllocator());
    ConstructionTransaction<A> construction_tx(GetAllocator());
    ConstructionTransaction<A> move_construction_tx(GetAllocator());

    IteratorValueAdapter<A, MoveIterator<A>> move_values(
        MoveIterator<A>(storage_view.data));

    SizeType<A> requested_capacity =
        ComputeCapacity(storage_view.capacity, new_size);
    Pointer<A> new_data = allocation_tx.Allocate(requested_capacity);

    construction_tx.Construct(new_data + insert_index, values, insert_count);

    move_construction_tx.Construct(new_data, move_values, insert_index);

    ConstructElements<A>(GetAllocator(), new_data + insert_end_index,
                         move_values, storage_view.size - insert_index);

    DestroyAdapter<A>::DestroyElements(GetAllocator(), storage_view.data,
                                       storage_view.size);

    std::move(construction_tx).Commit();
    std::move(move_construction_tx).Commit();
    DeallocateIfAllocated();
    SetAllocation(std::move(allocation_tx).Release());

    SetAllocatedSize(new_size);
    return Iterator<A>(new_data + insert_index);
  } else {
    SizeType<A> move_construction_destination_index =
        (std::max)(insert_end_index, storage_view.size);

    ConstructionTransaction<A> move_construction_tx(GetAllocator());

    IteratorValueAdapter<A, MoveIterator<A>> move_construction_values(
        MoveIterator<A>(storage_view.data +
                        (move_construction_destination_index - insert_count)));
    absl::Span<ValueType<A>> move_construction = {
        storage_view.data + move_construction_destination_index,
        new_size - move_construction_destination_index};

    Pointer<A> move_assignment_values = storage_view.data + insert_index;
    absl::Span<ValueType<A>> move_assignment = {
        storage_view.data + insert_end_index,
        move_construction_destination_index - insert_end_index};

    absl::Span<ValueType<A>> insert_assignment = {move_assignment_values,
                                                  move_construction.size()};

    absl::Span<ValueType<A>> insert_construction = {
        insert_assignment.data() + insert_assignment.size(),
        insert_count - insert_assignment.size()};

    move_construction_tx.Construct(move_construction.data(),
                                   move_construction_values,
                                   move_construction.size());

    for (Pointer<A>
             destination = move_assignment.data() + move_assignment.size(),
             last_destination = move_assignment.data(),
             source = move_assignment_values + move_assignment.size();
         ;) {
      --destination;
      --source;
      if (destination < last_destination) break;
      *destination = std::move(*source);
    }

    AssignElements<A>(insert_assignment.data(), values,
                      insert_assignment.size());

    ConstructElements<A>(GetAllocator(), insert_construction.data(), values,
                         insert_construction.size());

    std::move(move_construction_tx).Commit();

    AddSize(insert_count);
    return Iterator<A>(storage_view.data + insert_index);
  }
}
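
// Worked example for the in-place branch above: inserting 2 values at index
// 1 into a vector of size 4 (capacity >= 6). The destination index is
// max(3, 4) == 4, so elements [2, 4) are move-constructed into [4, 6), the
// backward loop move-assigns element 1 into slot 3, the two new values are
// assigned into [1, 3), and no additional construction is needed.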

template <typename T, size_t N, typename A>
template <typename... Args>
auto Storage<T, N, A>::EmplaceBack(Args&&... args) -> Reference<A> {
  StorageView<A> storage_view = MakeStorageView();
  const SizeType<A> n = storage_view.size;
  if (ABSL_PREDICT_TRUE(n != storage_view.capacity)) {
    // Fast path; new element fits.
    Pointer<A> last_ptr = storage_view.data + n;
    AllocatorTraits<A>::construct(GetAllocator(), last_ptr,
                                  std::forward<Args>(args)...);
    AddSize(1);
    return *last_ptr;
  }
  // TODO(b/173712035): Annotate with musttail attribute to prevent regression.
  return EmplaceBackSlow(std::forward<Args>(args)...);
}

template <typename T, size_t N, typename A>
template <typename... Args>
auto Storage<T, N, A>::EmplaceBackSlow(Args&&... args) -> Reference<A> {
  StorageView<A> storage_view = MakeStorageView();
  AllocationTransaction<A> allocation_tx(GetAllocator());
  IteratorValueAdapter<A, MoveIterator<A>> move_values(
      MoveIterator<A>(storage_view.data));
  SizeType<A> requested_capacity = NextCapacity(storage_view.capacity);
  Pointer<A> construct_data = allocation_tx.Allocate(requested_capacity);
  Pointer<A> last_ptr = construct_data + storage_view.size;

  // Construct new element.
  AllocatorTraits<A>::construct(GetAllocator(), last_ptr,
                                std::forward<Args>(args)...);
  // Move elements from old backing store to new backing store.
  ABSL_INTERNAL_TRY {
    ConstructElements<A>(GetAllocator(), allocation_tx.GetData(), move_values,
                         storage_view.size);
  }
  ABSL_INTERNAL_CATCH_ANY {
    AllocatorTraits<A>::destroy(GetAllocator(), last_ptr);
    ABSL_INTERNAL_RETHROW;
  }
  // Destroy elements in old backing store.
  DestroyAdapter<A>::DestroyElements(GetAllocator(), storage_view.data,
                                     storage_view.size);

  DeallocateIfAllocated();
  SetAllocation(std::move(allocation_tx).Release());
  SetIsAllocated();
  AddSize(1);
  return *last_ptr;
}

template <typename T, size_t N, typename A>
auto Storage<T, N, A>::Erase(ConstIterator<A> from,
                             ConstIterator<A> to) -> Iterator<A> {
  StorageView<A> storage_view = MakeStorageView();

  auto erase_size = static_cast<SizeType<A>>(std::distance(from, to));
  auto erase_index = static_cast<SizeType<A>>(
      std::distance(ConstIterator<A>(storage_view.data), from));
  SizeType<A> erase_end_index = erase_index + erase_size;

  // Fast path: if the value type is trivially relocatable and we know the
  // allocator doesn't do anything fancy, then we know it is legal for us to
  // simply destroy the elements in the "erasure window" (which cannot throw)
  // and then memcpy downward to close the window.
  if (absl::is_trivially_relocatable<ValueType<A>>::value &&
      std::is_nothrow_destructible<ValueType<A>>::value &&
      std::is_same<A, std::allocator<ValueType<A>>>::value) {
    DestroyAdapter<A>::DestroyElements(
        GetAllocator(), storage_view.data + erase_index, erase_size);
    std::memmove(
        reinterpret_cast<char*>(storage_view.data + erase_index),
        reinterpret_cast<const char*>(storage_view.data + erase_end_index),
        (storage_view.size - erase_end_index) * sizeof(ValueType<A>));
  } else {
    IteratorValueAdapter<A, MoveIterator<A>> move_values(
        MoveIterator<A>(storage_view.data + erase_end_index));

    AssignElements<A>(storage_view.data + erase_index, move_values,
                      storage_view.size - erase_end_index);

    DestroyAdapter<A>::DestroyElements(
        GetAllocator(), storage_view.data + (storage_view.size - erase_size),
        erase_size);
  }
  SubtractSize(erase_size);
  return Iterator<A>(storage_view.data + erase_index);
}
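
// For example: erasing [1, 3) from a trivially relocatable vector of size 5
// destroys elements 1 and 2 and then memmoves elements 3 and 4 down two
// slots; the slow path instead move-assigns elements 3 and 4 onto 1 and 2
// and destroys the two now-moved-from tail elements.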

template <typename T, size_t N, typename A>
auto Storage<T, N, A>::Reserve(SizeType<A> requested_capacity) -> void {
  StorageView<A> storage_view = MakeStorageView();

  if (ABSL_PREDICT_FALSE(requested_capacity <= storage_view.capacity)) return;

  AllocationTransaction<A> allocation_tx(GetAllocator());

  IteratorValueAdapter<A, MoveIterator<A>> move_values(
      MoveIterator<A>(storage_view.data));

  SizeType<A> new_requested_capacity =
      ComputeCapacity(storage_view.capacity, requested_capacity);
  Pointer<A> new_data = allocation_tx.Allocate(new_requested_capacity);

  ConstructElements<A>(GetAllocator(), new_data, move_values,
                       storage_view.size);

  DestroyAdapter<A>::DestroyElements(GetAllocator(), storage_view.data,
                                     storage_view.size);

  DeallocateIfAllocated();
  SetAllocation(std::move(allocation_tx).Release());
  SetIsAllocated();
}

template <typename T, size_t N, typename A>
auto Storage<T, N, A>::ShrinkToFit() -> void {
  // May only be called on allocated instances!
  ABSL_HARDENING_ASSERT(GetIsAllocated());

  StorageView<A> storage_view{GetAllocatedData(), GetSize(),
                              GetAllocatedCapacity()};

  if (ABSL_PREDICT_FALSE(storage_view.size == storage_view.capacity)) return;

  AllocationTransaction<A> allocation_tx(GetAllocator());

  IteratorValueAdapter<A, MoveIterator<A>> move_values(
      MoveIterator<A>(storage_view.data));

  Pointer<A> construct_data;
  if (storage_view.size > GetInlinedCapacity()) {
    SizeType<A> requested_capacity = storage_view.size;
    construct_data = allocation_tx.Allocate(requested_capacity);
    if (allocation_tx.GetCapacity() >= storage_view.capacity) {
      // Already using the smallest available heap allocation.
      return;
    }
  } else {
    construct_data = GetInlinedData();
  }

  ABSL_INTERNAL_TRY {
    ConstructElements<A>(GetAllocator(), construct_data, move_values,
                         storage_view.size);
  }
  ABSL_INTERNAL_CATCH_ANY {
    SetAllocation({storage_view.data, storage_view.capacity});
    ABSL_INTERNAL_RETHROW;
  }

  DestroyAdapter<A>::DestroyElements(GetAllocator(), storage_view.data,
                                     storage_view.size);

  MallocAdapter<A>::Deallocate(GetAllocator(), storage_view.data,
                               storage_view.capacity);

  if (allocation_tx.DidAllocate()) {
    SetAllocation(std::move(allocation_tx).Release());
  } else {
    UnsetIsAllocated();
  }
}

template <typename T, size_t N, typename A>
auto Storage<T, N, A>::Swap(Storage* other_storage_ptr) -> void {
  using std::swap;
  ABSL_HARDENING_ASSERT(this != other_storage_ptr);

  if (GetIsAllocated() && other_storage_ptr->GetIsAllocated()) {
    swap(data_.allocated, other_storage_ptr->data_.allocated);
  } else if (!GetIsAllocated() && !other_storage_ptr->GetIsAllocated()) {
    SwapInlinedElements(SwapInlinedElementsPolicy{}, other_storage_ptr);
  } else {
    Storage* allocated_ptr = this;
    Storage* inlined_ptr = other_storage_ptr;
    if (!allocated_ptr->GetIsAllocated()) swap(allocated_ptr, inlined_ptr);

    StorageView<A> allocated_storage_view{
        allocated_ptr->GetAllocatedData(), allocated_ptr->GetSize(),
        allocated_ptr->GetAllocatedCapacity()};

    IteratorValueAdapter<A, MoveIterator<A>> move_values(
        MoveIterator<A>(inlined_ptr->GetInlinedData()));

    ABSL_INTERNAL_TRY {
      ConstructElements<A>(inlined_ptr->GetAllocator(),
                           allocated_ptr->GetInlinedData(), move_values,
                           inlined_ptr->GetSize());
    }
    ABSL_INTERNAL_CATCH_ANY {
      allocated_ptr->SetAllocation(Allocation<A>{
          allocated_storage_view.data, allocated_storage_view.capacity});
      ABSL_INTERNAL_RETHROW;
    }

    DestroyAdapter<A>::DestroyElements(inlined_ptr->GetAllocator(),
                                       inlined_ptr->GetInlinedData(),
                                       inlined_ptr->GetSize());

    inlined_ptr->SetAllocation(Allocation<A>{allocated_storage_view.data,
                                             allocated_storage_view.capacity});
  }

  swap(GetSizeAndIsAllocated(), other_storage_ptr->GetSizeAndIsAllocated());
  swap(GetAllocator(), other_storage_ptr->GetAllocator());
}

template <typename T, size_t N, typename A>
void Storage<T, N, A>::SwapN(ElementwiseSwapPolicy, Storage* other,
                             SizeType<A> n) {
  std::swap_ranges(GetInlinedData(), GetInlinedData() + n,
                   other->GetInlinedData());
}

template <typename T, size_t N, typename A>
void Storage<T, N, A>::SwapN(ElementwiseConstructPolicy, Storage* other,
                             SizeType<A> n) {
  Pointer<A> a = GetInlinedData();
  Pointer<A> b = other->GetInlinedData();
  // See the note on allocators in `SwapInlinedElements`.
  A& allocator_a = GetAllocator();
  A& allocator_b = other->GetAllocator();
  for (SizeType<A> i = 0; i < n; ++i, ++a, ++b) {
    ValueType<A> tmp(std::move(*a));

    AllocatorTraits<A>::destroy(allocator_a, a);
    AllocatorTraits<A>::construct(allocator_b, a, std::move(*b));

    AllocatorTraits<A>::destroy(allocator_b, b);
    AllocatorTraits<A>::construct(allocator_a, b, std::move(tmp));
  }
}

template <typename T, size_t N, typename A>
void Storage<T, N, A>::SwapInlinedElements(MemcpyPolicy, Storage* other) {
  Data tmp = data_;
  data_ = other->data_;
  other->data_ = tmp;
}

template <typename T, size_t N, typename A>
template <typename NotMemcpyPolicy>
void Storage<T, N, A>::SwapInlinedElements(NotMemcpyPolicy policy,
                                           Storage* other) {
  // Note: `destroy` needs to use the pre-swap allocator while `construct`
  // needs the post-swap allocator. Allocators are swapped later on, outside
  // of `SwapInlinedElements`.
  Storage* small_ptr = this;
  Storage* large_ptr = other;
  if (small_ptr->GetSize() > large_ptr->GetSize()) {
    std::swap(small_ptr, large_ptr);
  }

  auto small_size = small_ptr->GetSize();
  auto diff = large_ptr->GetSize() - small_size;
  SwapN(policy, other, small_size);

  IteratorValueAdapter<A, MoveIterator<A>> move_values(
      MoveIterator<A>(large_ptr->GetInlinedData() + small_size));

  ConstructElements<A>(large_ptr->GetAllocator(),
                       small_ptr->GetInlinedData() + small_size, move_values,
                       diff);

  DestroyAdapter<A>::DestroyElements(large_ptr->GetAllocator(),
                                     large_ptr->GetInlinedData() + small_size,
                                     diff);
}

// End ignore "array-bounds"
#if !defined(__clang__) && defined(__GNUC__)
#pragma GCC diagnostic pop
#endif

}  // namespace inlined_vector_internal
ABSL_NAMESPACE_END
}  // namespace absl

#endif  // ABSL_CONTAINER_INTERNAL_INLINED_VECTOR_H_