1 // Copyright 2019 The Abseil Authors. 2 // 3 // Licensed under the Apache License, Version 2.0 (the "License"); 4 // you may not use this file except in compliance with the License. 5 // You may obtain a copy of the License at 6 // 7 // https://www.apache.org/licenses/LICENSE-2.0 8 // 9 // Unless required by applicable law or agreed to in writing, software 10 // distributed under the License is distributed on an "AS IS" BASIS, 11 // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 // See the License for the specific language governing permissions and 13 // limitations under the License. 14 15 #ifndef ABSL_CONTAINER_INTERNAL_INLINED_VECTOR_H_ 16 #define ABSL_CONTAINER_INTERNAL_INLINED_VECTOR_H_ 17 18 #include <algorithm> 19 #include <cstddef> 20 #include <cstring> 21 #include <iterator> 22 #include <limits> 23 #include <memory> 24 #include <new> 25 #include <type_traits> 26 #include <utility> 27 28 #include "absl/base/attributes.h" 29 #include "absl/base/config.h" 30 #include "absl/base/internal/identity.h" 31 #include "absl/base/macros.h" 32 #include "absl/container/internal/compressed_tuple.h" 33 #include "absl/memory/memory.h" 34 #include "absl/meta/type_traits.h" 35 #include "absl/types/span.h" 36 37 namespace absl { 38 ABSL_NAMESPACE_BEGIN 39 namespace inlined_vector_internal { 40 41 // GCC does not deal very well with the below code 42 #if !defined(__clang__) && defined(__GNUC__) 43 #pragma GCC diagnostic push 44 #pragma GCC diagnostic ignored "-Warray-bounds" 45 #endif 46 47 template <typename A> 48 using AllocatorTraits = std::allocator_traits<A>; 49 template <typename A> 50 using ValueType = typename AllocatorTraits<A>::value_type; 51 template <typename A> 52 using SizeType = typename AllocatorTraits<A>::size_type; 53 template <typename A> 54 using Pointer = typename AllocatorTraits<A>::pointer; 55 template <typename A> 56 using ConstPointer = typename AllocatorTraits<A>::const_pointer; 57 template <typename A> 58 using SizeType = typename AllocatorTraits<A>::size_type; 59 template <typename A> 60 using DifferenceType = typename AllocatorTraits<A>::difference_type; 61 template <typename A> 62 using Reference = ValueType<A>&; 63 template <typename A> 64 using ConstReference = const ValueType<A>&; 65 template <typename A> 66 using Iterator = Pointer<A>; 67 template <typename A> 68 using ConstIterator = ConstPointer<A>; 69 template <typename A> 70 using ReverseIterator = typename std::reverse_iterator<Iterator<A>>; 71 template <typename A> 72 using ConstReverseIterator = typename std::reverse_iterator<ConstIterator<A>>; 73 template <typename A> 74 using MoveIterator = typename std::move_iterator<Iterator<A>>; 75 76 template <typename Iterator> 77 using IsAtLeastForwardIterator = std::is_convertible< 78 typename std::iterator_traits<Iterator>::iterator_category, 79 std::forward_iterator_tag>; 80 81 template <typename A> 82 using IsMoveAssignOk = std::is_move_assignable<ValueType<A>>; 83 template <typename A> 84 using IsSwapOk = absl::type_traits_internal::IsSwappable<ValueType<A>>; 85 86 template <typename A, 87 bool IsTriviallyDestructible = 88 absl::is_trivially_destructible<ValueType<A>>::value && 89 std::is_same<A, std::allocator<ValueType<A>>>::value> 90 struct DestroyAdapter; 91 92 template <typename A> 93 struct DestroyAdapter<A, /* IsTriviallyDestructible */ false> { 94 static void DestroyElements(A& allocator, Pointer<A> destroy_first, 95 SizeType<A> destroy_size) { 96 for (SizeType<A> i = destroy_size; i != 0;) { 97 --i; 98 AllocatorTraits<A>::destroy(allocator, destroy_first + i); 99 } 100 } 101 }; 102 103 template <typename A> 104 struct DestroyAdapter<A, /* IsTriviallyDestructible */ true> { 105 static void DestroyElements(A& allocator, Pointer<A> destroy_first, 106 SizeType<A> destroy_size) { 107 static_cast<void>(allocator); 108 static_cast<void>(destroy_first); 109 static_cast<void>(destroy_size); 110 } 111 }; 112 113 template <typename A> 114 struct Allocation { 115 Pointer<A> data = nullptr; 116 SizeType<A> capacity = 0; 117 }; 118 119 template <typename A, 120 bool IsOverAligned = 121 (alignof(ValueType<A>) > ABSL_INTERNAL_DEFAULT_NEW_ALIGNMENT)> 122 struct MallocAdapter { 123 static Allocation<A> Allocate(A& allocator, SizeType<A> requested_capacity) { 124 return {AllocatorTraits<A>::allocate(allocator, requested_capacity), 125 requested_capacity}; 126 } 127 128 static void Deallocate(A& allocator, Pointer<A> pointer, 129 SizeType<A> capacity) { 130 AllocatorTraits<A>::deallocate(allocator, pointer, capacity); 131 } 132 }; 133 134 template <typename A, typename ValueAdapter> 135 void ConstructElements(absl::internal::type_identity_t<A>& allocator, 136 Pointer<A> construct_first, ValueAdapter& values, 137 SizeType<A> construct_size) { 138 for (SizeType<A> i = 0; i < construct_size; ++i) { 139 ABSL_INTERNAL_TRY { values.ConstructNext(allocator, construct_first + i); } 140 ABSL_INTERNAL_CATCH_ANY { 141 DestroyAdapter<A>::DestroyElements(allocator, construct_first, i); 142 ABSL_INTERNAL_RETHROW; 143 } 144 } 145 } 146 147 template <typename A, typename ValueAdapter> 148 void AssignElements(Pointer<A> assign_first, ValueAdapter& values, 149 SizeType<A> assign_size) { 150 for (SizeType<A> i = 0; i < assign_size; ++i) { 151 values.AssignNext(assign_first + i); 152 } 153 } 154 155 template <typename A> 156 struct StorageView { 157 Pointer<A> data; 158 SizeType<A> size; 159 SizeType<A> capacity; 160 }; 161 162 template <typename A, typename Iterator> 163 class IteratorValueAdapter { 164 public: 165 explicit IteratorValueAdapter(const Iterator& it) : it_(it) {} 166 167 void ConstructNext(A& allocator, Pointer<A> construct_at) { 168 AllocatorTraits<A>::construct(allocator, construct_at, *it_); 169 ++it_; 170 } 171 172 void AssignNext(Pointer<A> assign_at) { 173 *assign_at = *it_; 174 ++it_; 175 } 176 177 private: 178 Iterator it_; 179 }; 180 181 template <typename A> 182 class CopyValueAdapter { 183 public: 184 explicit CopyValueAdapter(ConstPointer<A> p) : ptr_(p) {} 185 186 void ConstructNext(A& allocator, Pointer<A> construct_at) { 187 AllocatorTraits<A>::construct(allocator, construct_at, *ptr_); 188 } 189 190 void AssignNext(Pointer<A> assign_at) { *assign_at = *ptr_; } 191 192 private: 193 ConstPointer<A> ptr_; 194 }; 195 196 template <typename A> 197 class DefaultValueAdapter { 198 public: 199 explicit DefaultValueAdapter() {} 200 201 void ConstructNext(A& allocator, Pointer<A> construct_at) { 202 AllocatorTraits<A>::construct(allocator, construct_at); 203 } 204 205 void AssignNext(Pointer<A> assign_at) { *assign_at = ValueType<A>(); } 206 }; 207 208 template <typename A> 209 class AllocationTransaction { 210 public: 211 explicit AllocationTransaction(A& allocator) 212 : allocator_data_(allocator, nullptr), capacity_(0) {} 213 214 ~AllocationTransaction() { 215 if (DidAllocate()) { 216 MallocAdapter<A>::Deallocate(GetAllocator(), GetData(), GetCapacity()); 217 } 218 } 219 220 AllocationTransaction(const AllocationTransaction&) = delete; 221 void operator=(const AllocationTransaction&) = delete; 222 223 A& GetAllocator() { return allocator_data_.template get<0>(); } 224 Pointer<A>& GetData() { return allocator_data_.template get<1>(); } 225 SizeType<A>& GetCapacity() { return capacity_; } 226 227 bool DidAllocate() { return GetData() != nullptr; } 228 229 Pointer<A> Allocate(SizeType<A> requested_capacity) { 230 Allocation<A> result = 231 MallocAdapter<A>::Allocate(GetAllocator(), requested_capacity); 232 GetData() = result.data; 233 GetCapacity() = result.capacity; 234 return result.data; 235 } 236 237 ABSL_MUST_USE_RESULT Allocation<A> Release() && { 238 Allocation<A> result = {GetData(), GetCapacity()}; 239 Reset(); 240 return result; 241 } 242 243 private: 244 void Reset() { 245 GetData() = nullptr; 246 GetCapacity() = 0; 247 } 248 249 container_internal::CompressedTuple<A, Pointer<A>> allocator_data_; 250 SizeType<A> capacity_; 251 }; 252 253 template <typename A> 254 class ConstructionTransaction { 255 public: 256 explicit ConstructionTransaction(A& allocator) 257 : allocator_data_(allocator, nullptr), size_(0) {} 258 259 ~ConstructionTransaction() { 260 if (DidConstruct()) { 261 DestroyAdapter<A>::DestroyElements(GetAllocator(), GetData(), GetSize()); 262 } 263 } 264 265 ConstructionTransaction(const ConstructionTransaction&) = delete; 266 void operator=(const ConstructionTransaction&) = delete; 267 268 A& GetAllocator() { return allocator_data_.template get<0>(); } 269 Pointer<A>& GetData() { return allocator_data_.template get<1>(); } 270 SizeType<A>& GetSize() { return size_; } 271 272 bool DidConstruct() { return GetData() != nullptr; } 273 template <typename ValueAdapter> 274 void Construct(Pointer<A> data, ValueAdapter& values, SizeType<A> size) { 275 ConstructElements<A>(GetAllocator(), data, values, size); 276 GetData() = data; 277 GetSize() = size; 278 } 279 void Commit() && { 280 GetData() = nullptr; 281 GetSize() = 0; 282 } 283 284 private: 285 container_internal::CompressedTuple<A, Pointer<A>> allocator_data_; 286 SizeType<A> size_; 287 }; 288 289 template <typename T, size_t N, typename A> 290 class Storage { 291 public: 292 struct MemcpyPolicy {}; 293 struct ElementwiseAssignPolicy {}; 294 struct ElementwiseSwapPolicy {}; 295 struct ElementwiseConstructPolicy {}; 296 297 using MoveAssignmentPolicy = absl::conditional_t< 298 // Fast path: if the value type can be trivially move assigned and 299 // destroyed, and we know the allocator doesn't do anything fancy, then 300 // it's safe for us to simply adopt the contents of the storage for 301 // `other` and remove its own reference to them. It's as if we had 302 // individually move-assigned each value and then destroyed the original. 303 absl::conjunction<absl::is_trivially_move_assignable<ValueType<A>>, 304 absl::is_trivially_destructible<ValueType<A>>, 305 std::is_same<A, std::allocator<ValueType<A>>>>::value, 306 MemcpyPolicy, 307 // Otherwise we use move assignment if possible. If not, we simulate 308 // move assignment using move construction. 309 // 310 // Note that this is in contrast to e.g. std::vector and std::optional, 311 // which are themselves not move-assignable when their contained type is 312 // not. 313 absl::conditional_t<IsMoveAssignOk<A>::value, ElementwiseAssignPolicy, 314 ElementwiseConstructPolicy>>; 315 316 // The policy to be used specifically when swapping inlined elements. 317 using SwapInlinedElementsPolicy = absl::conditional_t< 318 // Fast path: if the value type can be trivially relocated, and we 319 // know the allocator doesn't do anything fancy, then it's safe for us 320 // to simply swap the bytes in the inline storage. It's as if we had 321 // relocated the first vector's elements into temporary storage, 322 // relocated the second's elements into the (now-empty) first's, 323 // and then relocated from temporary storage into the second. 324 absl::conjunction<absl::is_trivially_relocatable<ValueType<A>>, 325 std::is_same<A, std::allocator<ValueType<A>>>>::value, 326 MemcpyPolicy, 327 absl::conditional_t<IsSwapOk<A>::value, ElementwiseSwapPolicy, 328 ElementwiseConstructPolicy>>; 329 330 static SizeType<A> NextCapacity(SizeType<A> current_capacity) { 331 return current_capacity * 2; 332 } 333 334 static SizeType<A> ComputeCapacity(SizeType<A> current_capacity, 335 SizeType<A> requested_capacity) { 336 return (std::max)(NextCapacity(current_capacity), requested_capacity); 337 } 338 339 // --------------------------------------------------------------------------- 340 // Storage Constructors and Destructor 341 // --------------------------------------------------------------------------- 342 343 Storage() : metadata_(A(), /* size and is_allocated */ 0u) {} 344 345 explicit Storage(const A& allocator) 346 : metadata_(allocator, /* size and is_allocated */ 0u) {} 347 348 ~Storage() { 349 // Fast path: if we are empty and not allocated, there's nothing to do. 350 if (GetSizeAndIsAllocated() == 0) { 351 return; 352 } 353 354 // Fast path: if no destructors need to be run and we know the allocator 355 // doesn't do anything fancy, then all we need to do is deallocate (and 356 // maybe not even that). 357 if (absl::is_trivially_destructible<ValueType<A>>::value && 358 std::is_same<A, std::allocator<ValueType<A>>>::value) { 359 DeallocateIfAllocated(); 360 return; 361 } 362 363 DestroyContents(); 364 } 365 366 // --------------------------------------------------------------------------- 367 // Storage Member Accessors 368 // --------------------------------------------------------------------------- 369 370 SizeType<A>& GetSizeAndIsAllocated() { return metadata_.template get<1>(); } 371 372 const SizeType<A>& GetSizeAndIsAllocated() const { 373 return metadata_.template get<1>(); 374 } 375 376 SizeType<A> GetSize() const { return GetSizeAndIsAllocated() >> 1; } 377 378 bool GetIsAllocated() const { return GetSizeAndIsAllocated() & 1; } 379 380 Pointer<A> GetAllocatedData() { 381 // GCC 12 has a false-positive -Wmaybe-uninitialized warning here. 382 #if ABSL_INTERNAL_HAVE_MIN_GNUC_VERSION(12, 0) 383 #pragma GCC diagnostic push 384 #pragma GCC diagnostic ignored "-Wmaybe-uninitialized" 385 #endif 386 return data_.allocated.allocated_data; 387 #if ABSL_INTERNAL_HAVE_MIN_GNUC_VERSION(12, 0) 388 #pragma GCC diagnostic pop 389 #endif 390 } 391 392 ConstPointer<A> GetAllocatedData() const { 393 return data_.allocated.allocated_data; 394 } 395 396 // ABSL_ATTRIBUTE_NO_SANITIZE_CFI is used because the memory pointed to may be 397 // uninitialized, a common pattern in allocate()+construct() APIs. 398 // https://clang.llvm.org/docs/ControlFlowIntegrity.html#bad-cast-checking 399 // NOTE: When this was written, LLVM documentation did not explicitly 400 // mention that casting `char*` and using `reinterpret_cast` qualifies 401 // as a bad cast. 402 ABSL_ATTRIBUTE_NO_SANITIZE_CFI Pointer<A> GetInlinedData() { 403 return reinterpret_cast<Pointer<A>>(data_.inlined.inlined_data); 404 } 405 406 ABSL_ATTRIBUTE_NO_SANITIZE_CFI ConstPointer<A> GetInlinedData() const { 407 return reinterpret_cast<ConstPointer<A>>(data_.inlined.inlined_data); 408 } 409 410 SizeType<A> GetAllocatedCapacity() const { 411 return data_.allocated.allocated_capacity; 412 } 413 414 SizeType<A> GetInlinedCapacity() const { 415 return static_cast<SizeType<A>>(kOptimalInlinedSize); 416 } 417 418 StorageView<A> MakeStorageView() { 419 return GetIsAllocated() ? StorageView<A>{GetAllocatedData(), GetSize(), 420 GetAllocatedCapacity()} 421 : StorageView<A>{GetInlinedData(), GetSize(), 422 GetInlinedCapacity()}; 423 } 424 425 A& GetAllocator() { return metadata_.template get<0>(); } 426 427 const A& GetAllocator() const { return metadata_.template get<0>(); } 428 429 // --------------------------------------------------------------------------- 430 // Storage Member Mutators 431 // --------------------------------------------------------------------------- 432 433 ABSL_ATTRIBUTE_NOINLINE void InitFrom(const Storage& other); 434 435 template <typename ValueAdapter> 436 void Initialize(ValueAdapter values, SizeType<A> new_size); 437 438 template <typename ValueAdapter> 439 void Assign(ValueAdapter values, SizeType<A> new_size); 440 441 template <typename ValueAdapter> 442 void Resize(ValueAdapter values, SizeType<A> new_size); 443 444 template <typename ValueAdapter> 445 Iterator<A> Insert(ConstIterator<A> pos, ValueAdapter values, 446 SizeType<A> insert_count); 447 448 template <typename... Args> 449 Reference<A> EmplaceBack(Args&&... args); 450 451 Iterator<A> Erase(ConstIterator<A> from, ConstIterator<A> to); 452 453 void Reserve(SizeType<A> requested_capacity); 454 455 void ShrinkToFit(); 456 457 void Swap(Storage* other_storage_ptr); 458 459 void SetIsAllocated() { 460 GetSizeAndIsAllocated() |= static_cast<SizeType<A>>(1); 461 } 462 463 void UnsetIsAllocated() { 464 GetSizeAndIsAllocated() &= ((std::numeric_limits<SizeType<A>>::max)() - 1); 465 } 466 467 void SetSize(SizeType<A> size) { 468 GetSizeAndIsAllocated() = 469 (size << 1) | static_cast<SizeType<A>>(GetIsAllocated()); 470 } 471 472 void SetAllocatedSize(SizeType<A> size) { 473 GetSizeAndIsAllocated() = (size << 1) | static_cast<SizeType<A>>(1); 474 } 475 476 void SetInlinedSize(SizeType<A> size) { 477 GetSizeAndIsAllocated() = size << static_cast<SizeType<A>>(1); 478 } 479 480 void AddSize(SizeType<A> count) { 481 GetSizeAndIsAllocated() += count << static_cast<SizeType<A>>(1); 482 } 483 484 void SubtractSize(SizeType<A> count) { 485 ABSL_HARDENING_ASSERT(count <= GetSize()); 486 487 GetSizeAndIsAllocated() -= count << static_cast<SizeType<A>>(1); 488 } 489 490 void SetAllocation(Allocation<A> allocation) { 491 data_.allocated.allocated_data = allocation.data; 492 data_.allocated.allocated_capacity = allocation.capacity; 493 } 494 495 void MemcpyFrom(const Storage& other_storage) { 496 // Assumption check: it doesn't make sense to memcpy inlined elements unless 497 // we know the allocator doesn't do anything fancy, and one of the following 498 // holds: 499 // 500 // * The elements are trivially relocatable. 501 // 502 // * It's possible to trivially assign the elements and then destroy the 503 // source. 504 // 505 // * It's possible to trivially copy construct/assign the elements. 506 // 507 { 508 using V = ValueType<A>; 509 ABSL_HARDENING_ASSERT( 510 other_storage.GetIsAllocated() || 511 (std::is_same<A, std::allocator<V>>::value && 512 ( 513 // First case above 514 absl::is_trivially_relocatable<V>::value || 515 // Second case above 516 (absl::is_trivially_move_assignable<V>::value && 517 absl::is_trivially_destructible<V>::value) || 518 // Third case above 519 (absl::is_trivially_copy_constructible<V>::value || 520 absl::is_trivially_copy_assignable<V>::value)))); 521 } 522 523 GetSizeAndIsAllocated() = other_storage.GetSizeAndIsAllocated(); 524 data_ = other_storage.data_; 525 } 526 527 void DeallocateIfAllocated() { 528 if (GetIsAllocated()) { 529 MallocAdapter<A>::Deallocate(GetAllocator(), GetAllocatedData(), 530 GetAllocatedCapacity()); 531 } 532 } 533 534 private: 535 ABSL_ATTRIBUTE_NOINLINE void DestroyContents(); 536 537 using Metadata = container_internal::CompressedTuple<A, SizeType<A>>; 538 539 struct Allocated { 540 Pointer<A> allocated_data; 541 SizeType<A> allocated_capacity; 542 }; 543 544 // `kOptimalInlinedSize` is an automatically adjusted inlined capacity of the 545 // `InlinedVector`. Sometimes, it is possible to increase the capacity (from 546 // the user requested `N`) without increasing the size of the `InlinedVector`. 547 static constexpr size_t kOptimalInlinedSize = 548 (std::max)(N, sizeof(Allocated) / sizeof(ValueType<A>)); 549 550 struct Inlined { 551 alignas(ValueType<A>) char inlined_data[sizeof( 552 ValueType<A>[kOptimalInlinedSize])]; 553 }; 554 555 union Data { 556 Allocated allocated; 557 Inlined inlined; 558 }; 559 560 void SwapN(ElementwiseSwapPolicy, Storage* other, SizeType<A> n); 561 void SwapN(ElementwiseConstructPolicy, Storage* other, SizeType<A> n); 562 563 void SwapInlinedElements(MemcpyPolicy, Storage* other); 564 template <typename NotMemcpyPolicy> 565 void SwapInlinedElements(NotMemcpyPolicy, Storage* other); 566 567 template <typename... Args> 568 ABSL_ATTRIBUTE_NOINLINE Reference<A> EmplaceBackSlow(Args&&... args); 569 570 Metadata metadata_; 571 Data data_; 572 }; 573 574 template <typename T, size_t N, typename A> 575 void Storage<T, N, A>::DestroyContents() { 576 Pointer<A> data = GetIsAllocated() ? GetAllocatedData() : GetInlinedData(); 577 DestroyAdapter<A>::DestroyElements(GetAllocator(), data, GetSize()); 578 DeallocateIfAllocated(); 579 } 580 581 template <typename T, size_t N, typename A> 582 void Storage<T, N, A>::InitFrom(const Storage& other) { 583 const SizeType<A> n = other.GetSize(); 584 ABSL_HARDENING_ASSERT(n > 0); // Empty sources handled handled in caller. 585 ConstPointer<A> src; 586 Pointer<A> dst; 587 if (!other.GetIsAllocated()) { 588 dst = GetInlinedData(); 589 src = other.GetInlinedData(); 590 } else { 591 // Because this is only called from the `InlinedVector` constructors, it's 592 // safe to take on the allocation with size `0`. If `ConstructElements(...)` 593 // throws, deallocation will be automatically handled by `~Storage()`. 594 SizeType<A> requested_capacity = ComputeCapacity(GetInlinedCapacity(), n); 595 Allocation<A> allocation = 596 MallocAdapter<A>::Allocate(GetAllocator(), requested_capacity); 597 SetAllocation(allocation); 598 dst = allocation.data; 599 src = other.GetAllocatedData(); 600 } 601 602 // Fast path: if the value type is trivially copy constructible and we know 603 // the allocator doesn't do anything fancy, then we know it is legal for us to 604 // simply memcpy the other vector's elements. 605 if (absl::is_trivially_copy_constructible<ValueType<A>>::value && 606 std::is_same<A, std::allocator<ValueType<A>>>::value) { 607 std::memcpy(reinterpret_cast<char*>(dst), 608 reinterpret_cast<const char*>(src), n * sizeof(ValueType<A>)); 609 } else { 610 auto values = IteratorValueAdapter<A, ConstPointer<A>>(src); 611 ConstructElements<A>(GetAllocator(), dst, values, n); 612 } 613 614 GetSizeAndIsAllocated() = other.GetSizeAndIsAllocated(); 615 } 616 617 template <typename T, size_t N, typename A> 618 template <typename ValueAdapter> 619 auto Storage<T, N, A>::Initialize(ValueAdapter values, 620 SizeType<A> new_size) -> void { 621 // Only callable from constructors! 622 ABSL_HARDENING_ASSERT(!GetIsAllocated()); 623 ABSL_HARDENING_ASSERT(GetSize() == 0); 624 625 Pointer<A> construct_data; 626 if (new_size > GetInlinedCapacity()) { 627 // Because this is only called from the `InlinedVector` constructors, it's 628 // safe to take on the allocation with size `0`. If `ConstructElements(...)` 629 // throws, deallocation will be automatically handled by `~Storage()`. 630 SizeType<A> requested_capacity = 631 ComputeCapacity(GetInlinedCapacity(), new_size); 632 Allocation<A> allocation = 633 MallocAdapter<A>::Allocate(GetAllocator(), requested_capacity); 634 construct_data = allocation.data; 635 SetAllocation(allocation); 636 SetIsAllocated(); 637 } else { 638 construct_data = GetInlinedData(); 639 } 640 641 ConstructElements<A>(GetAllocator(), construct_data, values, new_size); 642 643 // Since the initial size was guaranteed to be `0` and the allocated bit is 644 // already correct for either case, *adding* `new_size` gives us the correct 645 // result faster than setting it directly. 646 AddSize(new_size); 647 } 648 649 template <typename T, size_t N, typename A> 650 template <typename ValueAdapter> 651 auto Storage<T, N, A>::Assign(ValueAdapter values, 652 SizeType<A> new_size) -> void { 653 StorageView<A> storage_view = MakeStorageView(); 654 655 AllocationTransaction<A> allocation_tx(GetAllocator()); 656 657 absl::Span<ValueType<A>> assign_loop; 658 absl::Span<ValueType<A>> construct_loop; 659 absl::Span<ValueType<A>> destroy_loop; 660 661 if (new_size > storage_view.capacity) { 662 SizeType<A> requested_capacity = 663 ComputeCapacity(storage_view.capacity, new_size); 664 construct_loop = {allocation_tx.Allocate(requested_capacity), new_size}; 665 destroy_loop = {storage_view.data, storage_view.size}; 666 } else if (new_size > storage_view.size) { 667 assign_loop = {storage_view.data, storage_view.size}; 668 construct_loop = {storage_view.data + storage_view.size, 669 new_size - storage_view.size}; 670 } else { 671 assign_loop = {storage_view.data, new_size}; 672 destroy_loop = {storage_view.data + new_size, storage_view.size - new_size}; 673 } 674 675 AssignElements<A>(assign_loop.data(), values, assign_loop.size()); 676 677 ConstructElements<A>(GetAllocator(), construct_loop.data(), values, 678 construct_loop.size()); 679 680 DestroyAdapter<A>::DestroyElements(GetAllocator(), destroy_loop.data(), 681 destroy_loop.size()); 682 683 if (allocation_tx.DidAllocate()) { 684 DeallocateIfAllocated(); 685 SetAllocation(std::move(allocation_tx).Release()); 686 SetIsAllocated(); 687 } 688 689 SetSize(new_size); 690 } 691 692 template <typename T, size_t N, typename A> 693 template <typename ValueAdapter> 694 auto Storage<T, N, A>::Resize(ValueAdapter values, 695 SizeType<A> new_size) -> void { 696 StorageView<A> storage_view = MakeStorageView(); 697 Pointer<A> const base = storage_view.data; 698 const SizeType<A> size = storage_view.size; 699 A& alloc = GetAllocator(); 700 if (new_size <= size) { 701 // Destroy extra old elements. 702 DestroyAdapter<A>::DestroyElements(alloc, base + new_size, size - new_size); 703 } else if (new_size <= storage_view.capacity) { 704 // Construct new elements in place. 705 ConstructElements<A>(alloc, base + size, values, new_size - size); 706 } else { 707 // Steps: 708 // a. Allocate new backing store. 709 // b. Construct new elements in new backing store. 710 // c. Move existing elements from old backing store to new backing store. 711 // d. Destroy all elements in old backing store. 712 // Use transactional wrappers for the first two steps so we can roll 713 // back if necessary due to exceptions. 714 AllocationTransaction<A> allocation_tx(alloc); 715 SizeType<A> requested_capacity = 716 ComputeCapacity(storage_view.capacity, new_size); 717 Pointer<A> new_data = allocation_tx.Allocate(requested_capacity); 718 719 ConstructionTransaction<A> construction_tx(alloc); 720 construction_tx.Construct(new_data + size, values, new_size - size); 721 722 IteratorValueAdapter<A, MoveIterator<A>> move_values( 723 (MoveIterator<A>(base))); 724 ConstructElements<A>(alloc, new_data, move_values, size); 725 726 DestroyAdapter<A>::DestroyElements(alloc, base, size); 727 std::move(construction_tx).Commit(); 728 DeallocateIfAllocated(); 729 SetAllocation(std::move(allocation_tx).Release()); 730 SetIsAllocated(); 731 } 732 SetSize(new_size); 733 } 734 735 template <typename T, size_t N, typename A> 736 template <typename ValueAdapter> 737 auto Storage<T, N, A>::Insert(ConstIterator<A> pos, ValueAdapter values, 738 SizeType<A> insert_count) -> Iterator<A> { 739 StorageView<A> storage_view = MakeStorageView(); 740 741 auto insert_index = static_cast<SizeType<A>>( 742 std::distance(ConstIterator<A>(storage_view.data), pos)); 743 SizeType<A> insert_end_index = insert_index + insert_count; 744 SizeType<A> new_size = storage_view.size + insert_count; 745 746 if (new_size > storage_view.capacity) { 747 AllocationTransaction<A> allocation_tx(GetAllocator()); 748 ConstructionTransaction<A> construction_tx(GetAllocator()); 749 ConstructionTransaction<A> move_construction_tx(GetAllocator()); 750 751 IteratorValueAdapter<A, MoveIterator<A>> move_values( 752 MoveIterator<A>(storage_view.data)); 753 754 SizeType<A> requested_capacity = 755 ComputeCapacity(storage_view.capacity, new_size); 756 Pointer<A> new_data = allocation_tx.Allocate(requested_capacity); 757 758 construction_tx.Construct(new_data + insert_index, values, insert_count); 759 760 move_construction_tx.Construct(new_data, move_values, insert_index); 761 762 ConstructElements<A>(GetAllocator(), new_data + insert_end_index, 763 move_values, storage_view.size - insert_index); 764 765 DestroyAdapter<A>::DestroyElements(GetAllocator(), storage_view.data, 766 storage_view.size); 767 768 std::move(construction_tx).Commit(); 769 std::move(move_construction_tx).Commit(); 770 DeallocateIfAllocated(); 771 SetAllocation(std::move(allocation_tx).Release()); 772 773 SetAllocatedSize(new_size); 774 return Iterator<A>(new_data + insert_index); 775 } else { 776 SizeType<A> move_construction_destination_index = 777 (std::max)(insert_end_index, storage_view.size); 778 779 ConstructionTransaction<A> move_construction_tx(GetAllocator()); 780 781 IteratorValueAdapter<A, MoveIterator<A>> move_construction_values( 782 MoveIterator<A>(storage_view.data + 783 (move_construction_destination_index - insert_count))); 784 absl::Span<ValueType<A>> move_construction = { 785 storage_view.data + move_construction_destination_index, 786 new_size - move_construction_destination_index}; 787 788 Pointer<A> move_assignment_values = storage_view.data + insert_index; 789 absl::Span<ValueType<A>> move_assignment = { 790 storage_view.data + insert_end_index, 791 move_construction_destination_index - insert_end_index}; 792 793 absl::Span<ValueType<A>> insert_assignment = {move_assignment_values, 794 move_construction.size()}; 795 796 absl::Span<ValueType<A>> insert_construction = { 797 insert_assignment.data() + insert_assignment.size(), 798 insert_count - insert_assignment.size()}; 799 800 move_construction_tx.Construct(move_construction.data(), 801 move_construction_values, 802 move_construction.size()); 803 804 for (Pointer<A> 805 destination = move_assignment.data() + move_assignment.size(), 806 last_destination = move_assignment.data(), 807 source = move_assignment_values + move_assignment.size(); 808 ;) { 809 --destination; 810 --source; 811 if (destination < last_destination) break; 812 *destination = std::move(*source); 813 } 814 815 AssignElements<A>(insert_assignment.data(), values, 816 insert_assignment.size()); 817 818 ConstructElements<A>(GetAllocator(), insert_construction.data(), values, 819 insert_construction.size()); 820 821 std::move(move_construction_tx).Commit(); 822 823 AddSize(insert_count); 824 return Iterator<A>(storage_view.data + insert_index); 825 } 826 } 827 828 template <typename T, size_t N, typename A> 829 template <typename... Args> 830 auto Storage<T, N, A>::EmplaceBack(Args&&... args) -> Reference<A> { 831 StorageView<A> storage_view = MakeStorageView(); 832 const SizeType<A> n = storage_view.size; 833 if (ABSL_PREDICT_TRUE(n != storage_view.capacity)) { 834 // Fast path; new element fits. 835 Pointer<A> last_ptr = storage_view.data + n; 836 AllocatorTraits<A>::construct(GetAllocator(), last_ptr, 837 std::forward<Args>(args)...); 838 AddSize(1); 839 return *last_ptr; 840 } 841 // TODO(b/173712035): Annotate with musttail attribute to prevent regression. 842 return EmplaceBackSlow(std::forward<Args>(args)...); 843 } 844 845 template <typename T, size_t N, typename A> 846 template <typename... Args> 847 auto Storage<T, N, A>::EmplaceBackSlow(Args&&... args) -> Reference<A> { 848 StorageView<A> storage_view = MakeStorageView(); 849 AllocationTransaction<A> allocation_tx(GetAllocator()); 850 IteratorValueAdapter<A, MoveIterator<A>> move_values( 851 MoveIterator<A>(storage_view.data)); 852 SizeType<A> requested_capacity = NextCapacity(storage_view.capacity); 853 Pointer<A> construct_data = allocation_tx.Allocate(requested_capacity); 854 Pointer<A> last_ptr = construct_data + storage_view.size; 855 856 // Construct new element. 857 AllocatorTraits<A>::construct(GetAllocator(), last_ptr, 858 std::forward<Args>(args)...); 859 // Move elements from old backing store to new backing store. 860 ABSL_INTERNAL_TRY { 861 ConstructElements<A>(GetAllocator(), allocation_tx.GetData(), move_values, 862 storage_view.size); 863 } 864 ABSL_INTERNAL_CATCH_ANY { 865 AllocatorTraits<A>::destroy(GetAllocator(), last_ptr); 866 ABSL_INTERNAL_RETHROW; 867 } 868 // Destroy elements in old backing store. 869 DestroyAdapter<A>::DestroyElements(GetAllocator(), storage_view.data, 870 storage_view.size); 871 872 DeallocateIfAllocated(); 873 SetAllocation(std::move(allocation_tx).Release()); 874 SetIsAllocated(); 875 AddSize(1); 876 return *last_ptr; 877 } 878 879 template <typename T, size_t N, typename A> 880 auto Storage<T, N, A>::Erase(ConstIterator<A> from, 881 ConstIterator<A> to) -> Iterator<A> { 882 StorageView<A> storage_view = MakeStorageView(); 883 884 auto erase_size = static_cast<SizeType<A>>(std::distance(from, to)); 885 auto erase_index = static_cast<SizeType<A>>( 886 std::distance(ConstIterator<A>(storage_view.data), from)); 887 SizeType<A> erase_end_index = erase_index + erase_size; 888 889 // Fast path: if the value type is trivially relocatable and we know 890 // the allocator doesn't do anything fancy, then we know it is legal for us to 891 // simply destroy the elements in the "erasure window" (which cannot throw) 892 // and then memcpy downward to close the window. 893 if (absl::is_trivially_relocatable<ValueType<A>>::value && 894 std::is_nothrow_destructible<ValueType<A>>::value && 895 std::is_same<A, std::allocator<ValueType<A>>>::value) { 896 DestroyAdapter<A>::DestroyElements( 897 GetAllocator(), storage_view.data + erase_index, erase_size); 898 std::memmove( 899 reinterpret_cast<char*>(storage_view.data + erase_index), 900 reinterpret_cast<const char*>(storage_view.data + erase_end_index), 901 (storage_view.size - erase_end_index) * sizeof(ValueType<A>)); 902 } else { 903 IteratorValueAdapter<A, MoveIterator<A>> move_values( 904 MoveIterator<A>(storage_view.data + erase_end_index)); 905 906 AssignElements<A>(storage_view.data + erase_index, move_values, 907 storage_view.size - erase_end_index); 908 909 DestroyAdapter<A>::DestroyElements( 910 GetAllocator(), storage_view.data + (storage_view.size - erase_size), 911 erase_size); 912 } 913 SubtractSize(erase_size); 914 return Iterator<A>(storage_view.data + erase_index); 915 } 916 917 template <typename T, size_t N, typename A> 918 auto Storage<T, N, A>::Reserve(SizeType<A> requested_capacity) -> void { 919 StorageView<A> storage_view = MakeStorageView(); 920 921 if (ABSL_PREDICT_FALSE(requested_capacity <= storage_view.capacity)) return; 922 923 AllocationTransaction<A> allocation_tx(GetAllocator()); 924 925 IteratorValueAdapter<A, MoveIterator<A>> move_values( 926 MoveIterator<A>(storage_view.data)); 927 928 SizeType<A> new_requested_capacity = 929 ComputeCapacity(storage_view.capacity, requested_capacity); 930 Pointer<A> new_data = allocation_tx.Allocate(new_requested_capacity); 931 932 ConstructElements<A>(GetAllocator(), new_data, move_values, 933 storage_view.size); 934 935 DestroyAdapter<A>::DestroyElements(GetAllocator(), storage_view.data, 936 storage_view.size); 937 938 DeallocateIfAllocated(); 939 SetAllocation(std::move(allocation_tx).Release()); 940 SetIsAllocated(); 941 } 942 943 template <typename T, size_t N, typename A> 944 auto Storage<T, N, A>::ShrinkToFit() -> void { 945 // May only be called on allocated instances! 946 ABSL_HARDENING_ASSERT(GetIsAllocated()); 947 948 StorageView<A> storage_view{GetAllocatedData(), GetSize(), 949 GetAllocatedCapacity()}; 950 951 if (ABSL_PREDICT_FALSE(storage_view.size == storage_view.capacity)) return; 952 953 AllocationTransaction<A> allocation_tx(GetAllocator()); 954 955 IteratorValueAdapter<A, MoveIterator<A>> move_values( 956 MoveIterator<A>(storage_view.data)); 957 958 Pointer<A> construct_data; 959 if (storage_view.size > GetInlinedCapacity()) { 960 SizeType<A> requested_capacity = storage_view.size; 961 construct_data = allocation_tx.Allocate(requested_capacity); 962 if (allocation_tx.GetCapacity() >= storage_view.capacity) { 963 // Already using the smallest available heap allocation. 964 return; 965 } 966 } else { 967 construct_data = GetInlinedData(); 968 } 969 970 ABSL_INTERNAL_TRY { 971 ConstructElements<A>(GetAllocator(), construct_data, move_values, 972 storage_view.size); 973 } 974 ABSL_INTERNAL_CATCH_ANY { 975 SetAllocation({storage_view.data, storage_view.capacity}); 976 ABSL_INTERNAL_RETHROW; 977 } 978 979 DestroyAdapter<A>::DestroyElements(GetAllocator(), storage_view.data, 980 storage_view.size); 981 982 MallocAdapter<A>::Deallocate(GetAllocator(), storage_view.data, 983 storage_view.capacity); 984 985 if (allocation_tx.DidAllocate()) { 986 SetAllocation(std::move(allocation_tx).Release()); 987 } else { 988 UnsetIsAllocated(); 989 } 990 } 991 992 template <typename T, size_t N, typename A> 993 auto Storage<T, N, A>::Swap(Storage* other_storage_ptr) -> void { 994 using std::swap; 995 ABSL_HARDENING_ASSERT(this != other_storage_ptr); 996 997 if (GetIsAllocated() && other_storage_ptr->GetIsAllocated()) { 998 swap(data_.allocated, other_storage_ptr->data_.allocated); 999 } else if (!GetIsAllocated() && !other_storage_ptr->GetIsAllocated()) { 1000 SwapInlinedElements(SwapInlinedElementsPolicy{}, other_storage_ptr); 1001 } else { 1002 Storage* allocated_ptr = this; 1003 Storage* inlined_ptr = other_storage_ptr; 1004 if (!allocated_ptr->GetIsAllocated()) swap(allocated_ptr, inlined_ptr); 1005 1006 StorageView<A> allocated_storage_view{ 1007 allocated_ptr->GetAllocatedData(), allocated_ptr->GetSize(), 1008 allocated_ptr->GetAllocatedCapacity()}; 1009 1010 IteratorValueAdapter<A, MoveIterator<A>> move_values( 1011 MoveIterator<A>(inlined_ptr->GetInlinedData())); 1012 1013 ABSL_INTERNAL_TRY { 1014 ConstructElements<A>(inlined_ptr->GetAllocator(), 1015 allocated_ptr->GetInlinedData(), move_values, 1016 inlined_ptr->GetSize()); 1017 } 1018 ABSL_INTERNAL_CATCH_ANY { 1019 allocated_ptr->SetAllocation(Allocation<A>{ 1020 allocated_storage_view.data, allocated_storage_view.capacity}); 1021 ABSL_INTERNAL_RETHROW; 1022 } 1023 1024 DestroyAdapter<A>::DestroyElements(inlined_ptr->GetAllocator(), 1025 inlined_ptr->GetInlinedData(), 1026 inlined_ptr->GetSize()); 1027 1028 inlined_ptr->SetAllocation(Allocation<A>{allocated_storage_view.data, 1029 allocated_storage_view.capacity}); 1030 } 1031 1032 swap(GetSizeAndIsAllocated(), other_storage_ptr->GetSizeAndIsAllocated()); 1033 swap(GetAllocator(), other_storage_ptr->GetAllocator()); 1034 } 1035 1036 template <typename T, size_t N, typename A> 1037 void Storage<T, N, A>::SwapN(ElementwiseSwapPolicy, Storage* other, 1038 SizeType<A> n) { 1039 std::swap_ranges(GetInlinedData(), GetInlinedData() + n, 1040 other->GetInlinedData()); 1041 } 1042 1043 template <typename T, size_t N, typename A> 1044 void Storage<T, N, A>::SwapN(ElementwiseConstructPolicy, Storage* other, 1045 SizeType<A> n) { 1046 Pointer<A> a = GetInlinedData(); 1047 Pointer<A> b = other->GetInlinedData(); 1048 // see note on allocators in `SwapInlinedElements`. 1049 A& allocator_a = GetAllocator(); 1050 A& allocator_b = other->GetAllocator(); 1051 for (SizeType<A> i = 0; i < n; ++i, ++a, ++b) { 1052 ValueType<A> tmp(std::move(*a)); 1053 1054 AllocatorTraits<A>::destroy(allocator_a, a); 1055 AllocatorTraits<A>::construct(allocator_b, a, std::move(*b)); 1056 1057 AllocatorTraits<A>::destroy(allocator_b, b); 1058 AllocatorTraits<A>::construct(allocator_a, b, std::move(tmp)); 1059 } 1060 } 1061 1062 template <typename T, size_t N, typename A> 1063 void Storage<T, N, A>::SwapInlinedElements(MemcpyPolicy, Storage* other) { 1064 Data tmp = data_; 1065 data_ = other->data_; 1066 other->data_ = tmp; 1067 } 1068 1069 template <typename T, size_t N, typename A> 1070 template <typename NotMemcpyPolicy> 1071 void Storage<T, N, A>::SwapInlinedElements(NotMemcpyPolicy policy, 1072 Storage* other) { 1073 // Note: `destroy` needs to use pre-swap allocator while `construct` - 1074 // post-swap allocator. Allocators will be swapped later on outside of 1075 // `SwapInlinedElements`. 1076 Storage* small_ptr = this; 1077 Storage* large_ptr = other; 1078 if (small_ptr->GetSize() > large_ptr->GetSize()) { 1079 std::swap(small_ptr, large_ptr); 1080 } 1081 1082 auto small_size = small_ptr->GetSize(); 1083 auto diff = large_ptr->GetSize() - small_size; 1084 SwapN(policy, other, small_size); 1085 1086 IteratorValueAdapter<A, MoveIterator<A>> move_values( 1087 MoveIterator<A>(large_ptr->GetInlinedData() + small_size)); 1088 1089 ConstructElements<A>(large_ptr->GetAllocator(), 1090 small_ptr->GetInlinedData() + small_size, move_values, 1091 diff); 1092 1093 DestroyAdapter<A>::DestroyElements(large_ptr->GetAllocator(), 1094 large_ptr->GetInlinedData() + small_size, 1095 diff); 1096 } 1097 1098 // End ignore "array-bounds" 1099 #if !defined(__clang__) && defined(__GNUC__) 1100 #pragma GCC diagnostic pop 1101 #endif 1102 1103 } // namespace inlined_vector_internal 1104 ABSL_NAMESPACE_END 1105 } // namespace absl 1106 1107 #endif // ABSL_CONTAINER_INTERNAL_INLINED_VECTOR_H_ 1108