// Copyright 2019 The Abseil Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#ifndef ABSL_CONTAINER_INTERNAL_INLINED_VECTOR_H_
#define ABSL_CONTAINER_INTERNAL_INLINED_VECTOR_H_

#include <algorithm>
#include <cstddef>
#include <cstring>
#include <iterator>
#include <limits>
#include <memory>
#include <new>
#include <type_traits>
#include <utility>

#include "absl/base/attributes.h"
#include "absl/base/config.h"
#include "absl/base/internal/identity.h"
#include "absl/base/macros.h"
#include "absl/container/internal/compressed_tuple.h"
#include "absl/memory/memory.h"
#include "absl/meta/type_traits.h"
#include "absl/types/span.h"

namespace absl {
ABSL_NAMESPACE_BEGIN
namespace inlined_vector_internal {

// GCC does not deal very well with the code below.
#if !defined(__clang__) && defined(__GNUC__)
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Warray-bounds"
#endif

template <typename A>
using AllocatorTraits = std::allocator_traits<A>;
template <typename A>
using ValueType = typename AllocatorTraits<A>::value_type;
template <typename A>
using SizeType = typename AllocatorTraits<A>::size_type;
template <typename A>
using Pointer = typename AllocatorTraits<A>::pointer;
template <typename A>
using ConstPointer = typename AllocatorTraits<A>::const_pointer;
template <typename A>
using DifferenceType = typename AllocatorTraits<A>::difference_type;
template <typename A>
using Reference = ValueType<A>&;
template <typename A>
using ConstReference = const ValueType<A>&;
template <typename A>
using Iterator = Pointer<A>;
template <typename A>
using ConstIterator = ConstPointer<A>;
template <typename A>
using ReverseIterator = typename std::reverse_iterator<Iterator<A>>;
template <typename A>
using ConstReverseIterator = typename std::reverse_iterator<ConstIterator<A>>;
template <typename A>
using MoveIterator = typename std::move_iterator<Iterator<A>>;

template <typename Iterator>
using IsAtLeastForwardIterator = std::is_convertible<
    typename std::iterator_traits<Iterator>::iterator_category,
    std::forward_iterator_tag>;

template <typename A>
using IsMoveAssignOk = std::is_move_assignable<ValueType<A>>;
template <typename A>
using IsSwapOk = absl::type_traits_internal::IsSwappable<ValueType<A>>;

template <typename A, bool IsTriviallyDestructible =
                          absl::is_trivially_destructible<ValueType<A>>::value>
struct DestroyAdapter;

template <typename A>
struct DestroyAdapter<A, /* IsTriviallyDestructible */ false> {
  static void DestroyElements(A& allocator, Pointer<A> destroy_first,
                              SizeType<A> destroy_size) {
    for (SizeType<A> i = destroy_size; i != 0;) {
      --i;
      AllocatorTraits<A>::destroy(allocator, destroy_first + i);
    }
  }
};

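// For trivially destructible value types, destruction is a no-op, so the
// specialization below simply discards its arguments.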
template <typename A>
struct DestroyAdapter<A, /* IsTriviallyDestructible */ true> {
  static void DestroyElements(A& allocator, Pointer<A> destroy_first,
                              SizeType<A> destroy_size) {
    static_cast<void>(allocator);
    static_cast<void>(destroy_first);
    static_cast<void>(destroy_size);
  }
};

template <typename A>
struct Allocation {
  Pointer<A> data = nullptr;
  SizeType<A> capacity = 0;
};

template <typename A,
          bool IsOverAligned =
              (alignof(ValueType<A>) > ABSL_INTERNAL_DEFAULT_NEW_ALIGNMENT)>
struct MallocAdapter {
  static Allocation<A> Allocate(A& allocator, SizeType<A> requested_capacity) {
    return {AllocatorTraits<A>::allocate(allocator, requested_capacity),
            requested_capacity};
  }

  static void Deallocate(A& allocator, Pointer<A> pointer,
                         SizeType<A> capacity) {
    AllocatorTraits<A>::deallocate(allocator, pointer, capacity);
  }
};

template <typename A, typename ValueAdapter>
void ConstructElements(absl::internal::type_identity_t<A>& allocator,
                       Pointer<A> construct_first, ValueAdapter& values,
                       SizeType<A> construct_size) {
  for (SizeType<A> i = 0; i < construct_size; ++i) {
    ABSL_INTERNAL_TRY { values.ConstructNext(allocator, construct_first + i); }
    ABSL_INTERNAL_CATCH_ANY {
      DestroyAdapter<A>::DestroyElements(allocator, construct_first, i);
      ABSL_INTERNAL_RETHROW;
    }
  }
}

template <typename A, typename ValueAdapter>
void AssignElements(Pointer<A> assign_first, ValueAdapter& values,
                    SizeType<A> assign_size) {
  for (SizeType<A> i = 0; i < assign_size; ++i) {
    values.AssignNext(assign_first + i);
  }
}

template <typename A>
struct StorageView {
  Pointer<A> data;
  SizeType<A> size;
  SizeType<A> capacity;
};

template <typename A, typename Iterator>
class IteratorValueAdapter {
 public:
  explicit IteratorValueAdapter(const Iterator& it) : it_(it) {}

  void ConstructNext(A& allocator, Pointer<A> construct_at) {
    AllocatorTraits<A>::construct(allocator, construct_at, *it_);
    ++it_;
  }

  void AssignNext(Pointer<A> assign_at) {
    *assign_at = *it_;
    ++it_;
  }

 private:
  Iterator it_;
};

template <typename A>
class CopyValueAdapter {
 public:
  explicit CopyValueAdapter(ConstPointer<A> p) : ptr_(p) {}

  void ConstructNext(A& allocator, Pointer<A> construct_at) {
    AllocatorTraits<A>::construct(allocator, construct_at, *ptr_);
  }

  void AssignNext(Pointer<A> assign_at) { *assign_at = *ptr_; }

 private:
  ConstPointer<A> ptr_;
};

template <typename A>
class DefaultValueAdapter {
 public:
  explicit DefaultValueAdapter() {}

  void ConstructNext(A& allocator, Pointer<A> construct_at) {
    AllocatorTraits<A>::construct(allocator, construct_at);
  }

  void AssignNext(Pointer<A> assign_at) { *assign_at = ValueType<A>(); }
};

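// Illustrative sketch (not part of the API): filling three raw slots with
// copies of one value. `raw` is assumed to point at uninitialized storage for
// at least three ints; on an exception, `ConstructElements` destroys whatever
// it managed to construct before rethrowing.
//
//   std::allocator<int> alloc;
//   int value = 42;
//   CopyValueAdapter<std::allocator<int>> copies(&value);
//   ConstructElements<std::allocator<int>>(alloc, raw, copies, 3);
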
template <typename A>
class AllocationTransaction {
 public:
  explicit AllocationTransaction(A& allocator)
      : allocator_data_(allocator, nullptr), capacity_(0) {}

  ~AllocationTransaction() {
    if (DidAllocate()) {
      MallocAdapter<A>::Deallocate(GetAllocator(), GetData(), GetCapacity());
    }
  }

  AllocationTransaction(const AllocationTransaction&) = delete;
  void operator=(const AllocationTransaction&) = delete;

  A& GetAllocator() { return allocator_data_.template get<0>(); }
  Pointer<A>& GetData() { return allocator_data_.template get<1>(); }
  SizeType<A>& GetCapacity() { return capacity_; }

  bool DidAllocate() { return GetData() != nullptr; }

  Pointer<A> Allocate(SizeType<A> requested_capacity) {
    Allocation<A> result =
        MallocAdapter<A>::Allocate(GetAllocator(), requested_capacity);
    GetData() = result.data;
    GetCapacity() = result.capacity;
    return result.data;
  }

  ABSL_MUST_USE_RESULT Allocation<A> Release() && {
    Allocation<A> result = {GetData(), GetCapacity()};
    Reset();
    return result;
  }

 private:
  void Reset() {
    GetData() = nullptr;
    GetCapacity() = 0;
  }

  container_internal::CompressedTuple<A, Pointer<A>> allocator_data_;
  SizeType<A> capacity_;
};

template <typename A>
class ConstructionTransaction {
 public:
  explicit ConstructionTransaction(A& allocator)
      : allocator_data_(allocator, nullptr), size_(0) {}

  ~ConstructionTransaction() {
    if (DidConstruct()) {
      DestroyAdapter<A>::DestroyElements(GetAllocator(), GetData(), GetSize());
    }
  }

  ConstructionTransaction(const ConstructionTransaction&) = delete;
  void operator=(const ConstructionTransaction&) = delete;

  A& GetAllocator() { return allocator_data_.template get<0>(); }
  Pointer<A>& GetData() { return allocator_data_.template get<1>(); }
  SizeType<A>& GetSize() { return size_; }

  bool DidConstruct() { return GetData() != nullptr; }
  template <typename ValueAdapter>
  void Construct(Pointer<A> data, ValueAdapter& values, SizeType<A> size) {
    ConstructElements<A>(GetAllocator(), data, values, size);
    GetData() = data;
    GetSize() = size;
  }
  void Commit() && {
    GetData() = nullptr;
    GetSize() = 0;
  }

 private:
  container_internal::CompressedTuple<A, Pointer<A>> allocator_data_;
  SizeType<A> size_;
};

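// Typical shape of the transactional code below (a sketch, not a fixed
// recipe): allocate and construct under transactions, then commit or release
// only once every throwing operation has succeeded, so that an exception
// anywhere in between rolls the whole operation back.
//
//   AllocationTransaction<A> allocation_tx(alloc);
//   Pointer<A> new_data = allocation_tx.Allocate(capacity);  // freed on throw
//   ConstructElements<A>(alloc, new_data, values, size);     // may throw
//   SetAllocation(std::move(allocation_tx).Release());       // now owned
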
template <typename T, size_t N, typename A>
class Storage {
 public:
  struct MemcpyPolicy {};
  struct ElementwiseAssignPolicy {};
  struct ElementwiseSwapPolicy {};
  struct ElementwiseConstructPolicy {};

  using MoveAssignmentPolicy = absl::conditional_t<
      // Fast path: if the value type can be trivially move assigned and
      // destroyed, and we know the allocator doesn't do anything fancy, then
      // it's safe for us to simply adopt the contents of the storage for
      // `other` and remove its own reference to them. It's as if we had
      // individually move-assigned each value and then destroyed the original.
      absl::conjunction<absl::is_trivially_move_assignable<ValueType<A>>,
                        absl::is_trivially_destructible<ValueType<A>>,
                        std::is_same<A, std::allocator<ValueType<A>>>>::value,
      MemcpyPolicy,
      // Otherwise we use move assignment if possible. If not, we simulate
      // move assignment using move construction.
      //
      // Note that this is in contrast to e.g. std::vector and std::optional,
      // which are themselves not move-assignable when their contained type is
      // not.
      absl::conditional_t<IsMoveAssignOk<A>::value, ElementwiseAssignPolicy,
                          ElementwiseConstructPolicy>>;

  // The policy to be used specifically when swapping inlined elements.
  using SwapInlinedElementsPolicy = absl::conditional_t<
      // Fast path: if the value type can be trivially relocated, and we know
      // the allocator doesn't do anything fancy, then it's safe for us to
      // simply swap the bytes in the inline storage. It's as if we had
      // relocated the first vector's elements into temporary storage,
      // relocated the second's elements into the (now-empty) first's, and
      // then relocated from temporary storage into the second.
      absl::conjunction<absl::is_trivially_relocatable<ValueType<A>>,
                        std::is_same<A, std::allocator<ValueType<A>>>>::value,
      MemcpyPolicy,
      absl::conditional_t<IsSwapOk<A>::value, ElementwiseSwapPolicy,
                          ElementwiseConstructPolicy>>;

  static SizeType<A> NextCapacity(SizeType<A> current_capacity) {
    return current_capacity * 2;
  }

  static SizeType<A> ComputeCapacity(SizeType<A> current_capacity,
                                     SizeType<A> requested_capacity) {
    return (std::max)(NextCapacity(current_capacity), requested_capacity);
  }

  // ---------------------------------------------------------------------------
  // Storage Constructors and Destructor
  // ---------------------------------------------------------------------------

  Storage() : metadata_(A(), /* size and is_allocated */ 0u) {}

  explicit Storage(const A& allocator)
      : metadata_(allocator, /* size and is_allocated */ 0u) {}

  ~Storage() {
    // Fast path: if we are empty and not allocated, there's nothing to do.
    if (GetSizeAndIsAllocated() == 0) {
      return;
    }

    // Fast path: if no destructors need to be run and we know the allocator
    // doesn't do anything fancy, then all we need to do is deallocate (and
    // maybe not even that).
    if (absl::is_trivially_destructible<ValueType<A>>::value &&
        std::is_same<A, std::allocator<ValueType<A>>>::value) {
      DeallocateIfAllocated();
      return;
    }

    DestroyContents();
  }

  // ---------------------------------------------------------------------------
  // Storage Member Accessors
  // ---------------------------------------------------------------------------

  SizeType<A>& GetSizeAndIsAllocated() { return metadata_.template get<1>(); }

  const SizeType<A>& GetSizeAndIsAllocated() const {
    return metadata_.template get<1>();
  }

  SizeType<A> GetSize() const { return GetSizeAndIsAllocated() >> 1; }

  bool GetIsAllocated() const { return GetSizeAndIsAllocated() & 1; }

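  // The metadata word packs the size together with the allocation flag:
  // `size_and_is_allocated == (size << 1) | is_allocated`. As an illustrative
  // example, a vector of size 3 stored inline is encoded as 0b110 (6), while
  // the same size stored on the heap is encoded as 0b111 (7).
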
  Pointer<A> GetAllocatedData() {
    // GCC 12 has a false-positive -Wmaybe-uninitialized warning here.
#if ABSL_INTERNAL_HAVE_MIN_GNUC_VERSION(12, 0)
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wmaybe-uninitialized"
#endif
    return data_.allocated.allocated_data;
#if ABSL_INTERNAL_HAVE_MIN_GNUC_VERSION(12, 0)
#pragma GCC diagnostic pop
#endif
  }

  ConstPointer<A> GetAllocatedData() const {
    return data_.allocated.allocated_data;
  }

  // ABSL_ATTRIBUTE_NO_SANITIZE_CFI is used because the memory pointed to may
  // be uninitialized, a common pattern in allocate()+construct() APIs.
  // https://clang.llvm.org/docs/ControlFlowIntegrity.html#bad-cast-checking
  // NOTE: When this was written, LLVM documentation did not explicitly
  // mention that casting `char*` and using `reinterpret_cast` qualifies
  // as a bad cast.
  ABSL_ATTRIBUTE_NO_SANITIZE_CFI Pointer<A> GetInlinedData() {
    return reinterpret_cast<Pointer<A>>(data_.inlined.inlined_data);
  }

  ABSL_ATTRIBUTE_NO_SANITIZE_CFI ConstPointer<A> GetInlinedData() const {
    return reinterpret_cast<ConstPointer<A>>(data_.inlined.inlined_data);
  }

  SizeType<A> GetAllocatedCapacity() const {
    return data_.allocated.allocated_capacity;
  }

  SizeType<A> GetInlinedCapacity() const {
    return static_cast<SizeType<A>>(kOptimalInlinedSize);
  }

  StorageView<A> MakeStorageView() {
    return GetIsAllocated() ? StorageView<A>{GetAllocatedData(), GetSize(),
                                             GetAllocatedCapacity()}
                            : StorageView<A>{GetInlinedData(), GetSize(),
                                             GetInlinedCapacity()};
  }

  A& GetAllocator() { return metadata_.template get<0>(); }

  const A& GetAllocator() const { return metadata_.template get<0>(); }

  // ---------------------------------------------------------------------------
  // Storage Member Mutators
  // ---------------------------------------------------------------------------

  ABSL_ATTRIBUTE_NOINLINE void InitFrom(const Storage& other);

  template <typename ValueAdapter>
  void Initialize(ValueAdapter values, SizeType<A> new_size);

  template <typename ValueAdapter>
  void Assign(ValueAdapter values, SizeType<A> new_size);

  template <typename ValueAdapter>
  void Resize(ValueAdapter values, SizeType<A> new_size);

  template <typename ValueAdapter>
  Iterator<A> Insert(ConstIterator<A> pos, ValueAdapter values,
                     SizeType<A> insert_count);

  template <typename... Args>
  Reference<A> EmplaceBack(Args&&... args);

  Iterator<A> Erase(ConstIterator<A> from, ConstIterator<A> to);

  void Reserve(SizeType<A> requested_capacity);

  void ShrinkToFit();

  void Swap(Storage* other_storage_ptr);

  void SetIsAllocated() {
    GetSizeAndIsAllocated() |= static_cast<SizeType<A>>(1);
  }

  void UnsetIsAllocated() {
    GetSizeAndIsAllocated() &= ((std::numeric_limits<SizeType<A>>::max)() - 1);
  }

  void SetSize(SizeType<A> size) {
    GetSizeAndIsAllocated() =
        (size << 1) | static_cast<SizeType<A>>(GetIsAllocated());
  }

  void SetAllocatedSize(SizeType<A> size) {
    GetSizeAndIsAllocated() = (size << 1) | static_cast<SizeType<A>>(1);
  }

  void SetInlinedSize(SizeType<A> size) {
    GetSizeAndIsAllocated() = size << static_cast<SizeType<A>>(1);
  }

  void AddSize(SizeType<A> count) {
    GetSizeAndIsAllocated() += count << static_cast<SizeType<A>>(1);
  }

  void SubtractSize(SizeType<A> count) {
    ABSL_HARDENING_ASSERT(count <= GetSize());

    GetSizeAndIsAllocated() -= count << static_cast<SizeType<A>>(1);
  }

  void SetAllocation(Allocation<A> allocation) {
    data_.allocated.allocated_data = allocation.data;
    data_.allocated.allocated_capacity = allocation.capacity;
  }

  void MemcpyFrom(const Storage& other_storage) {
    // Assumption check: it doesn't make sense to memcpy inlined elements
    // unless we know the allocator doesn't do anything fancy, and one of the
    // following holds:
    //
    //  * The elements are trivially relocatable.
    //
    //  * It's possible to trivially assign the elements and then destroy the
    //    source.
    //
    //  * It's possible to trivially copy construct/assign the elements.
    //
    {
      using V = ValueType<A>;
      ABSL_HARDENING_ASSERT(
          other_storage.GetIsAllocated() ||
          (std::is_same<A, std::allocator<V>>::value &&
           (
               // First case above
               absl::is_trivially_relocatable<V>::value ||
               // Second case above
               (absl::is_trivially_move_assignable<V>::value &&
                absl::is_trivially_destructible<V>::value) ||
               // Third case above
               (absl::is_trivially_copy_constructible<V>::value ||
                absl::is_trivially_copy_assignable<V>::value))));
    }

    GetSizeAndIsAllocated() = other_storage.GetSizeAndIsAllocated();
    data_ = other_storage.data_;
  }

  void DeallocateIfAllocated() {
    if (GetIsAllocated()) {
      MallocAdapter<A>::Deallocate(GetAllocator(), GetAllocatedData(),
                                   GetAllocatedCapacity());
    }
  }

 private:
  ABSL_ATTRIBUTE_NOINLINE void DestroyContents();

  using Metadata = container_internal::CompressedTuple<A, SizeType<A>>;

  struct Allocated {
    Pointer<A> allocated_data;
    SizeType<A> allocated_capacity;
  };

  // `kOptimalInlinedSize` is an automatically adjusted inlined capacity of the
  // `InlinedVector`. Sometimes, it is possible to increase the capacity (from
  // the user requested `N`) without increasing the size of the
  // `InlinedVector`.
  static constexpr size_t kOptimalInlinedSize =
      (std::max)(N, sizeof(Allocated) / sizeof(ValueType<A>));

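  // Illustrative example (assuming a typical 64-bit target): `Allocated` is a
  // pointer plus a size, i.e. 16 bytes. For `InlinedVector<int32_t, 1>` the
  // union below must be at least that large anyway, so `kOptimalInlinedSize`
  // is max(1, 16 / 4) = 4: four elements fit inline at no extra cost.
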
  struct Inlined {
    alignas(ValueType<A>) char inlined_data[sizeof(
        ValueType<A>[kOptimalInlinedSize])];
  };

  union Data {
    Allocated allocated;
    Inlined inlined;
  };

  void SwapN(ElementwiseSwapPolicy, Storage* other, SizeType<A> n);
  void SwapN(ElementwiseConstructPolicy, Storage* other, SizeType<A> n);

  void SwapInlinedElements(MemcpyPolicy, Storage* other);
  template <typename NotMemcpyPolicy>
  void SwapInlinedElements(NotMemcpyPolicy, Storage* other);

  template <typename... Args>
  ABSL_ATTRIBUTE_NOINLINE Reference<A> EmplaceBackSlow(Args&&... args);

  Metadata metadata_;
  Data data_;
};

template <typename T, size_t N, typename A>
void Storage<T, N, A>::DestroyContents() {
  Pointer<A> data = GetIsAllocated() ? GetAllocatedData() : GetInlinedData();
  DestroyAdapter<A>::DestroyElements(GetAllocator(), data, GetSize());
  DeallocateIfAllocated();
}

template <typename T, size_t N, typename A>
void Storage<T, N, A>::InitFrom(const Storage& other) {
  const SizeType<A> n = other.GetSize();
  ABSL_HARDENING_ASSERT(n > 0);  // Empty sources handled in caller.
  ConstPointer<A> src;
  Pointer<A> dst;
  if (!other.GetIsAllocated()) {
    dst = GetInlinedData();
    src = other.GetInlinedData();
  } else {
    // Because this is only called from the `InlinedVector` constructors, it's
    // safe to take on the allocation with size `0`. If `ConstructElements(...)`
    // throws, deallocation will be automatically handled by `~Storage()`.
    SizeType<A> requested_capacity = ComputeCapacity(GetInlinedCapacity(), n);
    Allocation<A> allocation =
        MallocAdapter<A>::Allocate(GetAllocator(), requested_capacity);
    SetAllocation(allocation);
    dst = allocation.data;
    src = other.GetAllocatedData();
  }

  // Fast path: if the value type is trivially copy constructible and we know
  // the allocator doesn't do anything fancy, then we know it is legal for us
  // to simply memcpy the other vector's elements.
  if (absl::is_trivially_copy_constructible<ValueType<A>>::value &&
      std::is_same<A, std::allocator<ValueType<A>>>::value) {
    std::memcpy(reinterpret_cast<char*>(dst),
                reinterpret_cast<const char*>(src), n * sizeof(ValueType<A>));
  } else {
    auto values = IteratorValueAdapter<A, ConstPointer<A>>(src);
    ConstructElements<A>(GetAllocator(), dst, values, n);
  }

  GetSizeAndIsAllocated() = other.GetSizeAndIsAllocated();
}

template <typename T, size_t N, typename A>
template <typename ValueAdapter>
auto Storage<T, N, A>::Initialize(ValueAdapter values,
                                  SizeType<A> new_size) -> void {
  // Only callable from constructors!
  ABSL_HARDENING_ASSERT(!GetIsAllocated());
  ABSL_HARDENING_ASSERT(GetSize() == 0);

  Pointer<A> construct_data;
  if (new_size > GetInlinedCapacity()) {
    // Because this is only called from the `InlinedVector` constructors, it's
    // safe to take on the allocation with size `0`. If `ConstructElements(...)`
    // throws, deallocation will be automatically handled by `~Storage()`.
    SizeType<A> requested_capacity =
        ComputeCapacity(GetInlinedCapacity(), new_size);
    Allocation<A> allocation =
        MallocAdapter<A>::Allocate(GetAllocator(), requested_capacity);
    construct_data = allocation.data;
    SetAllocation(allocation);
    SetIsAllocated();
  } else {
    construct_data = GetInlinedData();
  }

  ConstructElements<A>(GetAllocator(), construct_data, values, new_size);

  // Since the initial size was guaranteed to be `0` and the allocated bit is
  // already correct for either case, *adding* `new_size` gives us the correct
  // result faster than setting it directly.
  AddSize(new_size);
}

template <typename T, size_t N, typename A>
template <typename ValueAdapter>
auto Storage<T, N, A>::Assign(ValueAdapter values,
                              SizeType<A> new_size) -> void {
  StorageView<A> storage_view = MakeStorageView();

  AllocationTransaction<A> allocation_tx(GetAllocator());

  absl::Span<ValueType<A>> assign_loop;
  absl::Span<ValueType<A>> construct_loop;
  absl::Span<ValueType<A>> destroy_loop;

  if (new_size > storage_view.capacity) {
    SizeType<A> requested_capacity =
        ComputeCapacity(storage_view.capacity, new_size);
    construct_loop = {allocation_tx.Allocate(requested_capacity), new_size};
    destroy_loop = {storage_view.data, storage_view.size};
  } else if (new_size > storage_view.size) {
    assign_loop = {storage_view.data, storage_view.size};
    construct_loop = {storage_view.data + storage_view.size,
                      new_size - storage_view.size};
  } else {
    assign_loop = {storage_view.data, new_size};
    destroy_loop = {storage_view.data + new_size,
                    storage_view.size - new_size};
  }

  AssignElements<A>(assign_loop.data(), values, assign_loop.size());

  ConstructElements<A>(GetAllocator(), construct_loop.data(), values,
                       construct_loop.size());

  DestroyAdapter<A>::DestroyElements(GetAllocator(), destroy_loop.data(),
                                     destroy_loop.size());

  if (allocation_tx.DidAllocate()) {
    DeallocateIfAllocated();
    SetAllocation(std::move(allocation_tx).Release());
    SetIsAllocated();
  }

  SetSize(new_size);
}

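// Illustrative example of the three `Assign` cases above: assigning 5 values
// into a vector with size 3 and capacity 8 takes the middle branch, so
// `assign_loop` covers the 3 live elements and `construct_loop` the next 2
// raw slots. Assigning 10 values instead takes the first branch and requests
// a new capacity of max(2 * 8, 10) = 16 via `ComputeCapacity`.
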
template <typename T, size_t N, typename A>
template <typename ValueAdapter>
auto Storage<T, N, A>::Resize(ValueAdapter values,
                              SizeType<A> new_size) -> void {
  StorageView<A> storage_view = MakeStorageView();
  Pointer<A> const base = storage_view.data;
  const SizeType<A> size = storage_view.size;
  A& alloc = GetAllocator();
  if (new_size <= size) {
    // Destroy extra old elements.
    DestroyAdapter<A>::DestroyElements(alloc, base + new_size,
                                       size - new_size);
  } else if (new_size <= storage_view.capacity) {
    // Construct new elements in place.
    ConstructElements<A>(alloc, base + size, values, new_size - size);
  } else {
    // Steps:
    //  a. Allocate new backing store.
    //  b. Construct new elements in new backing store.
    //  c. Move existing elements from old backing store to new backing store.
    //  d. Destroy all elements in old backing store.
    // Use transactional wrappers for the first two steps so we can roll
    // back if necessary due to exceptions.
    AllocationTransaction<A> allocation_tx(alloc);
    SizeType<A> requested_capacity =
        ComputeCapacity(storage_view.capacity, new_size);
    Pointer<A> new_data = allocation_tx.Allocate(requested_capacity);

    ConstructionTransaction<A> construction_tx(alloc);
    construction_tx.Construct(new_data + size, values, new_size - size);

    IteratorValueAdapter<A, MoveIterator<A>> move_values(
        (MoveIterator<A>(base)));
    ConstructElements<A>(alloc, new_data, move_values, size);

    DestroyAdapter<A>::DestroyElements(alloc, base, size);
    std::move(construction_tx).Commit();
    DeallocateIfAllocated();
    SetAllocation(std::move(allocation_tx).Release());
    SetIsAllocated();
  }
  SetSize(new_size);
}

template <typename T, size_t N, typename A>
template <typename ValueAdapter>
auto Storage<T, N, A>::Insert(ConstIterator<A> pos, ValueAdapter values,
                              SizeType<A> insert_count) -> Iterator<A> {
  StorageView<A> storage_view = MakeStorageView();

  auto insert_index = static_cast<SizeType<A>>(
      std::distance(ConstIterator<A>(storage_view.data), pos));
  SizeType<A> insert_end_index = insert_index + insert_count;
  SizeType<A> new_size = storage_view.size + insert_count;

  if (new_size > storage_view.capacity) {
    AllocationTransaction<A> allocation_tx(GetAllocator());
    ConstructionTransaction<A> construction_tx(GetAllocator());
    ConstructionTransaction<A> move_construction_tx(GetAllocator());

    IteratorValueAdapter<A, MoveIterator<A>> move_values(
        MoveIterator<A>(storage_view.data));

    SizeType<A> requested_capacity =
        ComputeCapacity(storage_view.capacity, new_size);
    Pointer<A> new_data = allocation_tx.Allocate(requested_capacity);

    construction_tx.Construct(new_data + insert_index, values, insert_count);

    move_construction_tx.Construct(new_data, move_values, insert_index);

    ConstructElements<A>(GetAllocator(), new_data + insert_end_index,
                         move_values, storage_view.size - insert_index);

    DestroyAdapter<A>::DestroyElements(GetAllocator(), storage_view.data,
                                       storage_view.size);

    std::move(construction_tx).Commit();
    std::move(move_construction_tx).Commit();
    DeallocateIfAllocated();
    SetAllocation(std::move(allocation_tx).Release());

    SetAllocatedSize(new_size);
    return Iterator<A>(new_data + insert_index);
  } else {
    SizeType<A> move_construction_destination_index =
        (std::max)(insert_end_index, storage_view.size);

    ConstructionTransaction<A> move_construction_tx(GetAllocator());

    IteratorValueAdapter<A, MoveIterator<A>> move_construction_values(
        MoveIterator<A>(storage_view.data +
                        (move_construction_destination_index - insert_count)));
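    // The spans below partition the remaining work (illustrative summary):
    // trailing elements whose destinations are currently raw memory are
    // move-constructed; elements that land on live slots are move-assigned
    // backwards so that overlapping ranges stay correct; then the new values
    // are assigned over live slots and constructed into the remaining raw
    // slots.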
    absl::Span<ValueType<A>> move_construction = {
        storage_view.data + move_construction_destination_index,
        new_size - move_construction_destination_index};

    Pointer<A> move_assignment_values = storage_view.data + insert_index;
    absl::Span<ValueType<A>> move_assignment = {
        storage_view.data + insert_end_index,
        move_construction_destination_index - insert_end_index};

    absl::Span<ValueType<A>> insert_assignment = {move_assignment_values,
                                                  move_construction.size()};

    absl::Span<ValueType<A>> insert_construction = {
        insert_assignment.data() + insert_assignment.size(),
        insert_count - insert_assignment.size()};

    move_construction_tx.Construct(move_construction.data(),
                                   move_construction_values,
                                   move_construction.size());

    for (Pointer<A>
             destination = move_assignment.data() + move_assignment.size(),
             last_destination = move_assignment.data(),
             source = move_assignment_values + move_assignment.size();
         ;) {
      --destination;
      --source;
      if (destination < last_destination) break;
      *destination = std::move(*source);
    }

    AssignElements<A>(insert_assignment.data(), values,
                      insert_assignment.size());

    ConstructElements<A>(GetAllocator(), insert_construction.data(), values,
                         insert_construction.size());

    std::move(move_construction_tx).Commit();

    AddSize(insert_count);
    return Iterator<A>(storage_view.data + insert_index);
  }
}

template <typename T, size_t N, typename A>
template <typename... Args>
auto Storage<T, N, A>::EmplaceBack(Args&&... args) -> Reference<A> {
  StorageView<A> storage_view = MakeStorageView();
  const SizeType<A> n = storage_view.size;
  if (ABSL_PREDICT_TRUE(n != storage_view.capacity)) {
    // Fast path; new element fits.
    Pointer<A> last_ptr = storage_view.data + n;
    AllocatorTraits<A>::construct(GetAllocator(), last_ptr,
                                  std::forward<Args>(args)...);
    AddSize(1);
    return *last_ptr;
  }
  // TODO(b/173712035): Annotate with musttail attribute to prevent regression.
  return EmplaceBackSlow(std::forward<Args>(args)...);
}

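// Slow path for `EmplaceBack`: grows the backing store. Note the order of
// operations below: the new element is constructed first so that, if moving
// the existing elements throws, the new element can be destroyed and the
// fresh allocation released by the transaction, leaving `*this` unchanged.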
template <typename T, size_t N, typename A>
template <typename... Args>
auto Storage<T, N, A>::EmplaceBackSlow(Args&&... args) -> Reference<A> {
  StorageView<A> storage_view = MakeStorageView();
  AllocationTransaction<A> allocation_tx(GetAllocator());
  IteratorValueAdapter<A, MoveIterator<A>> move_values(
      MoveIterator<A>(storage_view.data));
  SizeType<A> requested_capacity = NextCapacity(storage_view.capacity);
  Pointer<A> construct_data = allocation_tx.Allocate(requested_capacity);
  Pointer<A> last_ptr = construct_data + storage_view.size;

  // Construct new element.
  AllocatorTraits<A>::construct(GetAllocator(), last_ptr,
                                std::forward<Args>(args)...);
  // Move elements from old backing store to new backing store.
  ABSL_INTERNAL_TRY {
    ConstructElements<A>(GetAllocator(), allocation_tx.GetData(), move_values,
                         storage_view.size);
  }
  ABSL_INTERNAL_CATCH_ANY {
    AllocatorTraits<A>::destroy(GetAllocator(), last_ptr);
    ABSL_INTERNAL_RETHROW;
  }
  // Destroy elements in old backing store.
  DestroyAdapter<A>::DestroyElements(GetAllocator(), storage_view.data,
                                     storage_view.size);

  DeallocateIfAllocated();
  SetAllocation(std::move(allocation_tx).Release());
  SetIsAllocated();
  AddSize(1);
  return *last_ptr;
}

template <typename T, size_t N, typename A>
auto Storage<T, N, A>::Erase(ConstIterator<A> from,
                             ConstIterator<A> to) -> Iterator<A> {
  StorageView<A> storage_view = MakeStorageView();

  auto erase_size = static_cast<SizeType<A>>(std::distance(from, to));
  auto erase_index = static_cast<SizeType<A>>(
      std::distance(ConstIterator<A>(storage_view.data), from));
  SizeType<A> erase_end_index = erase_index + erase_size;

  // Fast path: if the value type is trivially relocatable and we know the
  // allocator doesn't do anything fancy, then we know it is legal for us to
  // simply destroy the elements in the "erasure window" (which cannot throw)
  // and then memcpy downward to close the window.
  if (absl::is_trivially_relocatable<ValueType<A>>::value &&
      std::is_nothrow_destructible<ValueType<A>>::value &&
      std::is_same<A, std::allocator<ValueType<A>>>::value) {
    DestroyAdapter<A>::DestroyElements(
        GetAllocator(), storage_view.data + erase_index, erase_size);
    std::memmove(
        reinterpret_cast<char*>(storage_view.data + erase_index),
        reinterpret_cast<const char*>(storage_view.data + erase_end_index),
        (storage_view.size - erase_end_index) * sizeof(ValueType<A>));
  } else {
    IteratorValueAdapter<A, MoveIterator<A>> move_values(
        MoveIterator<A>(storage_view.data + erase_end_index));

    AssignElements<A>(storage_view.data + erase_index, move_values,
                      storage_view.size - erase_end_index);

    DestroyAdapter<A>::DestroyElements(
        GetAllocator(), storage_view.data + (storage_view.size - erase_size),
        erase_size);
  }
  SubtractSize(erase_size);
  return Iterator<A>(storage_view.data + erase_index);
}

template <typename T, size_t N, typename A>
auto Storage<T, N, A>::Reserve(SizeType<A> requested_capacity) -> void {
  StorageView<A> storage_view = MakeStorageView();

  if (ABSL_PREDICT_FALSE(requested_capacity <= storage_view.capacity)) return;

  AllocationTransaction<A> allocation_tx(GetAllocator());

  IteratorValueAdapter<A, MoveIterator<A>> move_values(
      MoveIterator<A>(storage_view.data));

  SizeType<A> new_requested_capacity =
      ComputeCapacity(storage_view.capacity, requested_capacity);
  Pointer<A> new_data = allocation_tx.Allocate(new_requested_capacity);

  ConstructElements<A>(GetAllocator(), new_data, move_values,
                       storage_view.size);

  DestroyAdapter<A>::DestroyElements(GetAllocator(), storage_view.data,
                                     storage_view.size);

  DeallocateIfAllocated();
  SetAllocation(std::move(allocation_tx).Release());
  SetIsAllocated();
}

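// Shrinks the backing store to fit the current size: elements move either to
// a smaller heap allocation or back into the inline representation. If a
// move construction throws, the original allocation is reinstated before the
// exception propagates.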
template <typename T, size_t N, typename A>
auto Storage<T, N, A>::ShrinkToFit() -> void {
  // May only be called on allocated instances!
  ABSL_HARDENING_ASSERT(GetIsAllocated());

  StorageView<A> storage_view{GetAllocatedData(), GetSize(),
                              GetAllocatedCapacity()};

  if (ABSL_PREDICT_FALSE(storage_view.size == storage_view.capacity)) return;

  AllocationTransaction<A> allocation_tx(GetAllocator());

  IteratorValueAdapter<A, MoveIterator<A>> move_values(
      MoveIterator<A>(storage_view.data));

  Pointer<A> construct_data;
  if (storage_view.size > GetInlinedCapacity()) {
    SizeType<A> requested_capacity = storage_view.size;
    construct_data = allocation_tx.Allocate(requested_capacity);
    if (allocation_tx.GetCapacity() >= storage_view.capacity) {
      // Already using the smallest available heap allocation.
      return;
    }
  } else {
    construct_data = GetInlinedData();
  }

  ABSL_INTERNAL_TRY {
    ConstructElements<A>(GetAllocator(), construct_data, move_values,
                         storage_view.size);
  }
  ABSL_INTERNAL_CATCH_ANY {
    SetAllocation({storage_view.data, storage_view.capacity});
    ABSL_INTERNAL_RETHROW;
  }

  DestroyAdapter<A>::DestroyElements(GetAllocator(), storage_view.data,
                                     storage_view.size);

  MallocAdapter<A>::Deallocate(GetAllocator(), storage_view.data,
                               storage_view.capacity);

  if (allocation_tx.DidAllocate()) {
    SetAllocation(std::move(allocation_tx).Release());
  } else {
    UnsetIsAllocated();
  }
}

template <typename T, size_t N, typename A>
auto Storage<T, N, A>::Swap(Storage* other_storage_ptr) -> void {
  using std::swap;
  ABSL_HARDENING_ASSERT(this != other_storage_ptr);

  if (GetIsAllocated() && other_storage_ptr->GetIsAllocated()) {
    swap(data_.allocated, other_storage_ptr->data_.allocated);
  } else if (!GetIsAllocated() && !other_storage_ptr->GetIsAllocated()) {
    SwapInlinedElements(SwapInlinedElementsPolicy{}, other_storage_ptr);
  } else {
    Storage* allocated_ptr = this;
    Storage* inlined_ptr = other_storage_ptr;
    if (!allocated_ptr->GetIsAllocated()) swap(allocated_ptr, inlined_ptr);

    StorageView<A> allocated_storage_view{
        allocated_ptr->GetAllocatedData(), allocated_ptr->GetSize(),
        allocated_ptr->GetAllocatedCapacity()};

    IteratorValueAdapter<A, MoveIterator<A>> move_values(
        MoveIterator<A>(inlined_ptr->GetInlinedData()));

    ABSL_INTERNAL_TRY {
      ConstructElements<A>(inlined_ptr->GetAllocator(),
                           allocated_ptr->GetInlinedData(), move_values,
                           inlined_ptr->GetSize());
    }
    ABSL_INTERNAL_CATCH_ANY {
      allocated_ptr->SetAllocation(Allocation<A>{
          allocated_storage_view.data, allocated_storage_view.capacity});
      ABSL_INTERNAL_RETHROW;
    }

    DestroyAdapter<A>::DestroyElements(inlined_ptr->GetAllocator(),
                                       inlined_ptr->GetInlinedData(),
                                       inlined_ptr->GetSize());

    inlined_ptr->SetAllocation(Allocation<A>{allocated_storage_view.data,
                                             allocated_storage_view.capacity});
  }

  swap(GetSizeAndIsAllocated(), other_storage_ptr->GetSizeAndIsAllocated());
  swap(GetAllocator(), other_storage_ptr->GetAllocator());
}

template <typename T, size_t N, typename A>
void Storage<T, N, A>::SwapN(ElementwiseSwapPolicy, Storage* other,
                             SizeType<A> n) {
  std::swap_ranges(GetInlinedData(), GetInlinedData() + n,
                   other->GetInlinedData());
}

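// Fallback for value types that are movable but not swappable: each pair of
// elements is exchanged through a temporary with explicit destroy/construct
// calls rather than `swap`.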
template <typename T, size_t N, typename A>
void Storage<T, N, A>::SwapN(ElementwiseConstructPolicy, Storage* other,
                             SizeType<A> n) {
  Pointer<A> a = GetInlinedData();
  Pointer<A> b = other->GetInlinedData();
  // See the note on allocators in `SwapInlinedElements`.
  A& allocator_a = GetAllocator();
  A& allocator_b = other->GetAllocator();
  for (SizeType<A> i = 0; i < n; ++i, ++a, ++b) {
    ValueType<A> tmp(std::move(*a));

    AllocatorTraits<A>::destroy(allocator_a, a);
    AllocatorTraits<A>::construct(allocator_b, a, std::move(*b));

    AllocatorTraits<A>::destroy(allocator_b, b);
    AllocatorTraits<A>::construct(allocator_a, b, std::move(tmp));
  }
}

template <typename T, size_t N, typename A>
void Storage<T, N, A>::SwapInlinedElements(MemcpyPolicy, Storage* other) {
  Data tmp = data_;
  data_ = other->data_;
  other->data_ = tmp;
}

template <typename T, size_t N, typename A>
template <typename NotMemcpyPolicy>
void Storage<T, N, A>::SwapInlinedElements(NotMemcpyPolicy policy,
                                           Storage* other) {
  // Note: `destroy` needs to use the pre-swap allocator while `construct`
  // needs the post-swap allocator. The allocators themselves are swapped
  // later, outside of `SwapInlinedElements`.
  Storage* small_ptr = this;
  Storage* large_ptr = other;
  if (small_ptr->GetSize() > large_ptr->GetSize()) {
    std::swap(small_ptr, large_ptr);
  }

  auto small_size = small_ptr->GetSize();
  auto diff = large_ptr->GetSize() - small_size;
  SwapN(policy, other, small_size);

  IteratorValueAdapter<A, MoveIterator<A>> move_values(
      MoveIterator<A>(large_ptr->GetInlinedData() + small_size));

  ConstructElements<A>(large_ptr->GetAllocator(),
                       small_ptr->GetInlinedData() + small_size, move_values,
                       diff);

  DestroyAdapter<A>::DestroyElements(large_ptr->GetAllocator(),
                                     large_ptr->GetInlinedData() + small_size,
                                     diff);
}

// End ignore "array-bounds"
#if !defined(__clang__) && defined(__GNUC__)
#pragma GCC diagnostic pop
#endif

}  // namespace inlined_vector_internal
ABSL_NAMESPACE_END
}  // namespace absl

#endif  // ABSL_CONTAINER_INTERNAL_INLINED_VECTOR_H_