// Copyright 2019 The Abseil Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#ifndef ABSL_CONTAINER_INTERNAL_INLINED_VECTOR_H_
#define ABSL_CONTAINER_INTERNAL_INLINED_VECTOR_H_

#include <algorithm>
#include <cstddef>
#include <cstring>
#include <iterator>
#include <limits>
#include <memory>
#include <new>
#include <type_traits>
#include <utility>

#include "absl/base/attributes.h"
#include "absl/base/config.h"
#include "absl/base/macros.h"
#include "absl/container/internal/compressed_tuple.h"
#include "absl/memory/memory.h"
#include "absl/meta/type_traits.h"
#include "absl/types/span.h"

namespace absl {
ABSL_NAMESPACE_BEGIN
namespace inlined_vector_internal {

// GCC's -Warray-bounds check reports false positives on the code below.
#if !defined(__clang__) && defined(__GNUC__)
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Warray-bounds"
#endif

template <typename A>
using AllocatorTraits = std::allocator_traits<A>;
template <typename A>
using ValueType = typename AllocatorTraits<A>::value_type;
template <typename A>
using SizeType = typename AllocatorTraits<A>::size_type;
template <typename A>
using Pointer = typename AllocatorTraits<A>::pointer;
template <typename A>
using ConstPointer = typename AllocatorTraits<A>::const_pointer;
template <typename A>
using DifferenceType = typename AllocatorTraits<A>::difference_type;
template <typename A>
using Reference = ValueType<A>&;
template <typename A>
using ConstReference = const ValueType<A>&;
template <typename A>
using Iterator = Pointer<A>;
template <typename A>
using ConstIterator = ConstPointer<A>;
template <typename A>
using ReverseIterator = typename std::reverse_iterator<Iterator<A>>;
template <typename A>
using ConstReverseIterator = typename std::reverse_iterator<ConstIterator<A>>;
template <typename A>
using MoveIterator = typename std::move_iterator<Iterator<A>>;

template <typename Iterator>
using IsAtLeastForwardIterator = std::is_convertible<
    typename std::iterator_traits<Iterator>::iterator_category,
    std::forward_iterator_tag>;

template <typename A>
using IsMoveAssignOk = std::is_move_assignable<ValueType<A>>;
template <typename A>
using IsSwapOk = absl::type_traits_internal::IsSwappable<ValueType<A>>;

template <typename T>
struct TypeIdentity {
  using type = T;
};

// Used for function arguments in template functions to prevent ADL by forcing
// callers to explicitly specify the template parameter.
template <typename T>
using NoTypeDeduction = typename TypeIdentity<T>::type;

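// Destroys `destroy_size` elements starting at `destroy_first`. The dispatch
// on trivial destructibility lets the destruction loop compile away entirely
// when no destructor needs to run.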
template <typename A, bool IsTriviallyDestructible =
                          absl::is_trivially_destructible<ValueType<A>>::value>
struct DestroyAdapter;

template <typename A>
struct DestroyAdapter<A, /* IsTriviallyDestructible */ false> {
  static void DestroyElements(A& allocator, Pointer<A> destroy_first,
                              SizeType<A> destroy_size) {
    for (SizeType<A> i = destroy_size; i != 0;) {
      --i;
      AllocatorTraits<A>::destroy(allocator, destroy_first + i);
    }
  }
};

template <typename A>
struct DestroyAdapter<A, /* IsTriviallyDestructible */ true> {
  static void DestroyElements(A& allocator, Pointer<A> destroy_first,
                              SizeType<A> destroy_size) {
    static_cast<void>(allocator);
    static_cast<void>(destroy_first);
    static_cast<void>(destroy_size);
  }
};

template <typename A>
struct Allocation {
  Pointer<A> data = nullptr;
  SizeType<A> capacity = 0;
};

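// Thin wrapper over `AllocatorTraits<A>` for allocating and deallocating
// element storage. `IsOverAligned` records whether `ValueType<A>` requires
// stricter alignment than the default `operator new` guarantees.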
template <typename A,
          bool IsOverAligned =
              (alignof(ValueType<A>) > ABSL_INTERNAL_DEFAULT_NEW_ALIGNMENT)>
struct MallocAdapter {
  static Allocation<A> Allocate(A& allocator, SizeType<A> requested_capacity) {
    return {AllocatorTraits<A>::allocate(allocator, requested_capacity),
            requested_capacity};
  }

  static void Deallocate(A& allocator, Pointer<A> pointer,
                         SizeType<A> capacity) {
    AllocatorTraits<A>::deallocate(allocator, pointer, capacity);
  }
};

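// Constructs `construct_size` elements at `construct_first`, pulling one value
// at a time from `values`. If a constructor throws, the elements constructed
// so far are destroyed before the exception is rethrown.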
template <typename A, typename ValueAdapter>
void ConstructElements(NoTypeDeduction<A>& allocator,
                       Pointer<A> construct_first, ValueAdapter& values,
                       SizeType<A> construct_size) {
  for (SizeType<A> i = 0; i < construct_size; ++i) {
    ABSL_INTERNAL_TRY { values.ConstructNext(allocator, construct_first + i); }
    ABSL_INTERNAL_CATCH_ANY {
      DestroyAdapter<A>::DestroyElements(allocator, construct_first, i);
      ABSL_INTERNAL_RETHROW;
    }
  }
}

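// Assigns `assign_size` elements starting at `assign_first`, pulling one value
// at a time from `values`. The destination elements must already be alive.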
template <typename A, typename ValueAdapter>
void AssignElements(Pointer<A> assign_first, ValueAdapter& values,
                    SizeType<A> assign_size) {
  for (SizeType<A> i = 0; i < assign_size; ++i) {
    values.AssignNext(assign_first + i);
  }
}

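// A non-owning snapshot of a `Storage` instance's data pointer, size, and
// capacity.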
template <typename A>
struct StorageView {
  Pointer<A> data;
  SizeType<A> size;
  SizeType<A> capacity;
};

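// Value adapters feed `ConstructElements()` and `AssignElements()` one value
// at a time: `IteratorValueAdapter` reads successive values from an iterator,
// `CopyValueAdapter` repeats a single value, and `DefaultValueAdapter`
// constructs each element with no constructor arguments.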
template <typename A, typename Iterator>
class IteratorValueAdapter {
 public:
  explicit IteratorValueAdapter(const Iterator& it) : it_(it) {}

  void ConstructNext(A& allocator, Pointer<A> construct_at) {
    AllocatorTraits<A>::construct(allocator, construct_at, *it_);
    ++it_;
  }

  void AssignNext(Pointer<A> assign_at) {
    *assign_at = *it_;
    ++it_;
  }

 private:
  Iterator it_;
};

template <typename A>
class CopyValueAdapter {
 public:
  explicit CopyValueAdapter(ConstPointer<A> p) : ptr_(p) {}

  void ConstructNext(A& allocator, Pointer<A> construct_at) {
    AllocatorTraits<A>::construct(allocator, construct_at, *ptr_);
  }

  void AssignNext(Pointer<A> assign_at) { *assign_at = *ptr_; }

 private:
  ConstPointer<A> ptr_;
};

template <typename A>
class DefaultValueAdapter {
 public:
  explicit DefaultValueAdapter() {}

  void ConstructNext(A& allocator, Pointer<A> construct_at) {
    AllocatorTraits<A>::construct(allocator, construct_at);
  }

  void AssignNext(Pointer<A> assign_at) { *assign_at = ValueType<A>(); }
};

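// RAII helper that owns a pending heap allocation. Unless ownership is taken
// over via `Release()`, the destructor returns the memory to the allocator,
// which keeps callers exception safe while they populate new storage.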
template <typename A>
class AllocationTransaction {
 public:
  explicit AllocationTransaction(A& allocator)
      : allocator_data_(allocator, nullptr), capacity_(0) {}

  ~AllocationTransaction() {
    if (DidAllocate()) {
      MallocAdapter<A>::Deallocate(GetAllocator(), GetData(), GetCapacity());
    }
  }

  AllocationTransaction(const AllocationTransaction&) = delete;
  void operator=(const AllocationTransaction&) = delete;

  A& GetAllocator() { return allocator_data_.template get<0>(); }
  Pointer<A>& GetData() { return allocator_data_.template get<1>(); }
  SizeType<A>& GetCapacity() { return capacity_; }

  bool DidAllocate() { return GetData() != nullptr; }

  Pointer<A> Allocate(SizeType<A> requested_capacity) {
    Allocation<A> result =
        MallocAdapter<A>::Allocate(GetAllocator(), requested_capacity);
    GetData() = result.data;
    GetCapacity() = result.capacity;
    return result.data;
  }

  ABSL_MUST_USE_RESULT Allocation<A> Release() && {
    Allocation<A> result = {GetData(), GetCapacity()};
    Reset();
    return result;
  }

 private:
  void Reset() {
    GetData() = nullptr;
    GetCapacity() = 0;
  }

  container_internal::CompressedTuple<A, Pointer<A>> allocator_data_;
  SizeType<A> capacity_;
};

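// RAII helper that tracks a range of freshly constructed elements. Unless
// `Commit()` is called, the destructor destroys that range, rolling back a
// partially completed operation.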
template <typename A>
class ConstructionTransaction {
 public:
  explicit ConstructionTransaction(A& allocator)
      : allocator_data_(allocator, nullptr), size_(0) {}

  ~ConstructionTransaction() {
    if (DidConstruct()) {
      DestroyAdapter<A>::DestroyElements(GetAllocator(), GetData(), GetSize());
    }
  }

  ConstructionTransaction(const ConstructionTransaction&) = delete;
  void operator=(const ConstructionTransaction&) = delete;

  A& GetAllocator() { return allocator_data_.template get<0>(); }
  Pointer<A>& GetData() { return allocator_data_.template get<1>(); }
  SizeType<A>& GetSize() { return size_; }

  bool DidConstruct() { return GetData() != nullptr; }
  template <typename ValueAdapter>
  void Construct(Pointer<A> data, ValueAdapter& values, SizeType<A> size) {
    ConstructElements<A>(GetAllocator(), data, values, size);
    GetData() = data;
    GetSize() = size;
  }
  void Commit() && {
    GetData() = nullptr;
    GetSize() = 0;
  }

 private:
  container_internal::CompressedTuple<A, Pointer<A>> allocator_data_;
  SizeType<A> size_;
};

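// Storage<T, N, A> is the backing store for `InlinedVector`: a metadata word
// that packs the size together with an is-allocated bit, plus a union of
// inline element storage and a heap allocation, selected by that bit.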
template <typename T, size_t N, typename A>
class Storage {
 public:
  struct MemcpyPolicy {};
  struct ElementwiseAssignPolicy {};
  struct ElementwiseSwapPolicy {};
  struct ElementwiseConstructPolicy {};

  using MoveAssignmentPolicy = absl::conditional_t<
      // Fast path: if the value type can be trivially move assigned and
      // destroyed, and we know the allocator doesn't do anything fancy, then
      // it's safe for us to simply adopt the contents of the storage for
      // `other` and remove its own reference to them. It's as if we had
      // individually move-assigned each value and then destroyed the original.
      absl::conjunction<absl::is_trivially_move_assignable<ValueType<A>>,
                        absl::is_trivially_destructible<ValueType<A>>,
                        std::is_same<A, std::allocator<ValueType<A>>>>::value,
      MemcpyPolicy,
      // Otherwise we use move assignment if possible. If not, we simulate
      // move assignment using move construction.
      //
      // Note that this is in contrast to e.g. std::vector and std::optional,
      // which are themselves not move-assignable when their contained type is
      // not.
      absl::conditional_t<IsMoveAssignOk<A>::value, ElementwiseAssignPolicy,
                          ElementwiseConstructPolicy>>;

  // The policy to be used specifically when swapping inlined elements.
  using SwapInlinedElementsPolicy = absl::conditional_t<
      // Fast path: if the value type can be trivially move constructed/assigned
      // and destroyed, and we know the allocator doesn't do anything fancy,
      // then it's safe for us to simply swap the bytes in the inline storage.
      // It's as if we had move-constructed a temporary vector, move-assigned
      // one to the other, then move-assigned the first from the temporary.
      absl::conjunction<absl::is_trivially_move_constructible<ValueType<A>>,
                        absl::is_trivially_move_assignable<ValueType<A>>,
                        absl::is_trivially_destructible<ValueType<A>>,
                        std::is_same<A, std::allocator<ValueType<A>>>>::value,
      MemcpyPolicy,
      absl::conditional_t<IsSwapOk<A>::value, ElementwiseSwapPolicy,
                          ElementwiseConstructPolicy>>;

  static SizeType<A> NextCapacity(SizeType<A> current_capacity) {
    return current_capacity * 2;
  }

  static SizeType<A> ComputeCapacity(SizeType<A> current_capacity,
                                     SizeType<A> requested_capacity) {
    return (std::max)(NextCapacity(current_capacity), requested_capacity);
  }
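  // For example, appending to a full capacity-4 vector computes
  // ComputeCapacity(4, 5) == 8: the doubled capacity wins unless the caller
  // requests even more than that.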

  // ---------------------------------------------------------------------------
  // Storage Constructors and Destructor
  // ---------------------------------------------------------------------------

  Storage() : metadata_(A(), /* size and is_allocated */ 0u) {}

  explicit Storage(const A& allocator)
      : metadata_(allocator, /* size and is_allocated */ 0u) {}

  ~Storage() {
    // Fast path: if we are empty and not allocated, there's nothing to do.
    if (GetSizeAndIsAllocated() == 0) {
      return;
    }

    // Fast path: if no destructors need to be run and we know the allocator
    // doesn't do anything fancy, then all we need to do is deallocate (and
    // maybe not even that).
    if (absl::is_trivially_destructible<ValueType<A>>::value &&
        std::is_same<A, std::allocator<ValueType<A>>>::value) {
      DeallocateIfAllocated();
      return;
    }

    DestroyContents();
  }

  // ---------------------------------------------------------------------------
  // Storage Member Accessors
  // ---------------------------------------------------------------------------

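  // The low bit of the `SizeType<A>` field in `metadata_` records whether the
  // elements live in allocated (heap) storage; the remaining bits hold the
  // size. Hence the shifting and masking in the accessors and mutators below.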
  SizeType<A>& GetSizeAndIsAllocated() { return metadata_.template get<1>(); }

  const SizeType<A>& GetSizeAndIsAllocated() const {
    return metadata_.template get<1>();
  }

  SizeType<A> GetSize() const { return GetSizeAndIsAllocated() >> 1; }

  bool GetIsAllocated() const { return GetSizeAndIsAllocated() & 1; }

  Pointer<A> GetAllocatedData() {
    // GCC 12 has a false-positive -Wmaybe-uninitialized warning here.
#if ABSL_INTERNAL_HAVE_MIN_GNUC_VERSION(12, 0)
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wmaybe-uninitialized"
#endif
    return data_.allocated.allocated_data;
#if ABSL_INTERNAL_HAVE_MIN_GNUC_VERSION(12, 0)
#pragma GCC diagnostic pop
#endif
  }

  ConstPointer<A> GetAllocatedData() const {
    return data_.allocated.allocated_data;
  }

  // ABSL_ATTRIBUTE_NO_SANITIZE_CFI is used because the memory pointed to may be
  // uninitialized, a common pattern in allocate()+construct() APIs.
  // https://clang.llvm.org/docs/ControlFlowIntegrity.html#bad-cast-checking
  // NOTE: When this was written, LLVM documentation did not explicitly
  // mention that casting `char*` and using `reinterpret_cast` qualifies
  // as a bad cast.
  ABSL_ATTRIBUTE_NO_SANITIZE_CFI Pointer<A> GetInlinedData() {
    return reinterpret_cast<Pointer<A>>(data_.inlined.inlined_data);
  }

  ABSL_ATTRIBUTE_NO_SANITIZE_CFI ConstPointer<A> GetInlinedData() const {
    return reinterpret_cast<ConstPointer<A>>(data_.inlined.inlined_data);
  }

  SizeType<A> GetAllocatedCapacity() const {
    return data_.allocated.allocated_capacity;
  }

  SizeType<A> GetInlinedCapacity() const {
    return static_cast<SizeType<A>>(kOptimalInlinedSize);
  }

  StorageView<A> MakeStorageView() {
    return GetIsAllocated() ? StorageView<A>{GetAllocatedData(), GetSize(),
                                             GetAllocatedCapacity()}
                            : StorageView<A>{GetInlinedData(), GetSize(),
                                             GetInlinedCapacity()};
  }

  A& GetAllocator() { return metadata_.template get<0>(); }

  const A& GetAllocator() const { return metadata_.template get<0>(); }

  // ---------------------------------------------------------------------------
  // Storage Member Mutators
  // ---------------------------------------------------------------------------

  ABSL_ATTRIBUTE_NOINLINE void InitFrom(const Storage& other);

  template <typename ValueAdapter>
  void Initialize(ValueAdapter values, SizeType<A> new_size);

  template <typename ValueAdapter>
  void Assign(ValueAdapter values, SizeType<A> new_size);

  template <typename ValueAdapter>
  void Resize(ValueAdapter values, SizeType<A> new_size);

  template <typename ValueAdapter>
  Iterator<A> Insert(ConstIterator<A> pos, ValueAdapter values,
                     SizeType<A> insert_count);

  template <typename... Args>
  Reference<A> EmplaceBack(Args&&... args);

  Iterator<A> Erase(ConstIterator<A> from, ConstIterator<A> to);

  void Reserve(SizeType<A> requested_capacity);

  void ShrinkToFit();

  void Swap(Storage* other_storage_ptr);

  void SetIsAllocated() {
    GetSizeAndIsAllocated() |= static_cast<SizeType<A>>(1);
  }

  void UnsetIsAllocated() {
    GetSizeAndIsAllocated() &= ((std::numeric_limits<SizeType<A>>::max)() - 1);
  }

  void SetSize(SizeType<A> size) {
    GetSizeAndIsAllocated() =
        (size << 1) | static_cast<SizeType<A>>(GetIsAllocated());
  }

  void SetAllocatedSize(SizeType<A> size) {
    GetSizeAndIsAllocated() = (size << 1) | static_cast<SizeType<A>>(1);
  }

  void SetInlinedSize(SizeType<A> size) {
    GetSizeAndIsAllocated() = size << static_cast<SizeType<A>>(1);
  }

  void AddSize(SizeType<A> count) {
    GetSizeAndIsAllocated() += count << static_cast<SizeType<A>>(1);
  }

  void SubtractSize(SizeType<A> count) {
    ABSL_HARDENING_ASSERT(count <= GetSize());

    GetSizeAndIsAllocated() -= count << static_cast<SizeType<A>>(1);
  }

  void SetAllocation(Allocation<A> allocation) {
    data_.allocated.allocated_data = allocation.data;
    data_.allocated.allocated_capacity = allocation.capacity;
  }

  void MemcpyFrom(const Storage& other_storage) {
    // Assumption check: it doesn't make sense to memcpy inlined elements unless
    // we know the allocator doesn't do anything fancy, and one of the following
    // holds:
    //
    //  *  The elements are trivially relocatable.
    //
    //  *  It's possible to trivially assign the elements and then destroy the
    //     source.
    //
    //  *  It's possible to trivially copy construct/assign the elements.
    //
    {
      using V = ValueType<A>;
      ABSL_HARDENING_ASSERT(
          other_storage.GetIsAllocated() ||
          (std::is_same<A, std::allocator<V>>::value &&
           (
               // First case above
               absl::is_trivially_relocatable<V>::value ||
               // Second case above
               (absl::is_trivially_move_assignable<V>::value &&
                absl::is_trivially_destructible<V>::value) ||
               // Third case above
               (absl::is_trivially_copy_constructible<V>::value ||
                absl::is_trivially_copy_assignable<V>::value))));
    }

    GetSizeAndIsAllocated() = other_storage.GetSizeAndIsAllocated();
    data_ = other_storage.data_;
  }

  void DeallocateIfAllocated() {
    if (GetIsAllocated()) {
      MallocAdapter<A>::Deallocate(GetAllocator(), GetAllocatedData(),
                                   GetAllocatedCapacity());
    }
  }

 private:
  ABSL_ATTRIBUTE_NOINLINE void DestroyContents();

  using Metadata = container_internal::CompressedTuple<A, SizeType<A>>;

  struct Allocated {
    Pointer<A> allocated_data;
    SizeType<A> allocated_capacity;
  };

  // `kOptimalInlinedSize` is an automatically adjusted inlined capacity of the
  // `InlinedVector`. Sometimes, it is possible to increase the capacity (from
  // the user requested `N`) without increasing the size of the `InlinedVector`.
  static constexpr size_t kOptimalInlinedSize =
      (std::max)(N, sizeof(Allocated) / sizeof(ValueType<A>));
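  // For example, with `T = int64_t` and `N = 1` on a typical 64-bit target,
  // `sizeof(Allocated)` is 16, so two elements fit inline at no size cost.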

  struct Inlined {
    alignas(ValueType<A>) char inlined_data[sizeof(
        ValueType<A>[kOptimalInlinedSize])];
  };

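  // Only one member is active at a time: `allocated` when the elements live on
  // the heap, `inlined` otherwise (see the is-allocated bit in `metadata_`).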
  union Data {
    Allocated allocated;
    Inlined inlined;
  };

  void SwapN(ElementwiseSwapPolicy, Storage* other, SizeType<A> n);
  void SwapN(ElementwiseConstructPolicy, Storage* other, SizeType<A> n);

  void SwapInlinedElements(MemcpyPolicy, Storage* other);
  template <typename NotMemcpyPolicy>
  void SwapInlinedElements(NotMemcpyPolicy, Storage* other);

  template <typename... Args>
  ABSL_ATTRIBUTE_NOINLINE Reference<A> EmplaceBackSlow(Args&&... args);

  Metadata metadata_;
  Data data_;
};

template <typename T, size_t N, typename A>
void Storage<T, N, A>::DestroyContents() {
  Pointer<A> data = GetIsAllocated() ? GetAllocatedData() : GetInlinedData();
  DestroyAdapter<A>::DestroyElements(GetAllocator(), data, GetSize());
  DeallocateIfAllocated();
}

template <typename T, size_t N, typename A>
void Storage<T, N, A>::InitFrom(const Storage& other) {
  const SizeType<A> n = other.GetSize();
  ABSL_HARDENING_ASSERT(n > 0);  // Empty sources are handled in the caller.
  ConstPointer<A> src;
  Pointer<A> dst;
  if (!other.GetIsAllocated()) {
    dst = GetInlinedData();
    src = other.GetInlinedData();
  } else {
    // Because this is only called from the `InlinedVector` constructors, it's
    // safe to take on the allocation with size `0`. If `ConstructElements(...)`
    // throws, deallocation will be automatically handled by `~Storage()`.
    SizeType<A> requested_capacity = ComputeCapacity(GetInlinedCapacity(), n);
    Allocation<A> allocation =
        MallocAdapter<A>::Allocate(GetAllocator(), requested_capacity);
    SetAllocation(allocation);
    dst = allocation.data;
    src = other.GetAllocatedData();
  }

  // Fast path: if the value type is trivially copy constructible and we know
  // the allocator doesn't do anything fancy, then we know it is legal for us to
  // simply memcpy the other vector's elements.
  if (absl::is_trivially_copy_constructible<ValueType<A>>::value &&
      std::is_same<A, std::allocator<ValueType<A>>>::value) {
    std::memcpy(reinterpret_cast<char*>(dst),
                reinterpret_cast<const char*>(src), n * sizeof(ValueType<A>));
  } else {
    auto values = IteratorValueAdapter<A, ConstPointer<A>>(src);
    ConstructElements<A>(GetAllocator(), dst, values, n);
  }

  GetSizeAndIsAllocated() = other.GetSizeAndIsAllocated();
}

template <typename T, size_t N, typename A>
template <typename ValueAdapter>
auto Storage<T, N, A>::Initialize(ValueAdapter values, SizeType<A> new_size)
    -> void {
  // Only callable from constructors!
  ABSL_HARDENING_ASSERT(!GetIsAllocated());
  ABSL_HARDENING_ASSERT(GetSize() == 0);

  Pointer<A> construct_data;
  if (new_size > GetInlinedCapacity()) {
    // Because this is only called from the `InlinedVector` constructors, it's
    // safe to take on the allocation with size `0`. If `ConstructElements(...)`
    // throws, deallocation will be automatically handled by `~Storage()`.
    SizeType<A> requested_capacity =
        ComputeCapacity(GetInlinedCapacity(), new_size);
    Allocation<A> allocation =
        MallocAdapter<A>::Allocate(GetAllocator(), requested_capacity);
    construct_data = allocation.data;
    SetAllocation(allocation);
    SetIsAllocated();
  } else {
    construct_data = GetInlinedData();
  }

  ConstructElements<A>(GetAllocator(), construct_data, values, new_size);

  // Since the initial size was guaranteed to be `0` and the allocated bit is
  // already correct for either case, *adding* `new_size` gives us the correct
  // result faster than setting it directly.
  AddSize(new_size);
}

template <typename T, size_t N, typename A>
template <typename ValueAdapter>
auto Storage<T, N, A>::Assign(ValueAdapter values, SizeType<A> new_size)
    -> void {
  StorageView<A> storage_view = MakeStorageView();

  AllocationTransaction<A> allocation_tx(GetAllocator());

  absl::Span<ValueType<A>> assign_loop;
  absl::Span<ValueType<A>> construct_loop;
  absl::Span<ValueType<A>> destroy_loop;

  if (new_size > storage_view.capacity) {
    SizeType<A> requested_capacity =
        ComputeCapacity(storage_view.capacity, new_size);
    construct_loop = {allocation_tx.Allocate(requested_capacity), new_size};
    destroy_loop = {storage_view.data, storage_view.size};
  } else if (new_size > storage_view.size) {
    assign_loop = {storage_view.data, storage_view.size};
    construct_loop = {storage_view.data + storage_view.size,
                      new_size - storage_view.size};
  } else {
    assign_loop = {storage_view.data, new_size};
    destroy_loop = {storage_view.data + new_size, storage_view.size - new_size};
  }

  AssignElements<A>(assign_loop.data(), values, assign_loop.size());

  ConstructElements<A>(GetAllocator(), construct_loop.data(), values,
                       construct_loop.size());

  DestroyAdapter<A>::DestroyElements(GetAllocator(), destroy_loop.data(),
                                     destroy_loop.size());

  if (allocation_tx.DidAllocate()) {
    DeallocateIfAllocated();
    SetAllocation(std::move(allocation_tx).Release());
    SetIsAllocated();
  }

  SetSize(new_size);
}

template <typename T, size_t N, typename A>
template <typename ValueAdapter>
auto Storage<T, N, A>::Resize(ValueAdapter values, SizeType<A> new_size)
    -> void {
  StorageView<A> storage_view = MakeStorageView();
  Pointer<A> const base = storage_view.data;
  const SizeType<A> size = storage_view.size;
  A& alloc = GetAllocator();
  if (new_size <= size) {
    // Destroy extra old elements.
    DestroyAdapter<A>::DestroyElements(alloc, base + new_size, size - new_size);
  } else if (new_size <= storage_view.capacity) {
    // Construct new elements in place.
    ConstructElements<A>(alloc, base + size, values, new_size - size);
  } else {
    // Steps:
    //  a. Allocate new backing store.
    //  b. Construct new elements in new backing store.
    //  c. Move existing elements from old backing store to new backing store.
    //  d. Destroy all elements in old backing store.
    // Use transactional wrappers for the first two steps so we can roll
    // back if necessary due to exceptions.
    AllocationTransaction<A> allocation_tx(alloc);
    SizeType<A> requested_capacity =
        ComputeCapacity(storage_view.capacity, new_size);
    Pointer<A> new_data = allocation_tx.Allocate(requested_capacity);

    ConstructionTransaction<A> construction_tx(alloc);
    construction_tx.Construct(new_data + size, values, new_size - size);

    IteratorValueAdapter<A, MoveIterator<A>> move_values(
        (MoveIterator<A>(base)));
    ConstructElements<A>(alloc, new_data, move_values, size);

    DestroyAdapter<A>::DestroyElements(alloc, base, size);
    std::move(construction_tx).Commit();
    DeallocateIfAllocated();
    SetAllocation(std::move(allocation_tx).Release());
    SetIsAllocated();
  }
  SetSize(new_size);
}

template <typename T, size_t N, typename A>
template <typename ValueAdapter>
auto Storage<T, N, A>::Insert(ConstIterator<A> pos, ValueAdapter values,
                              SizeType<A> insert_count) -> Iterator<A> {
  StorageView<A> storage_view = MakeStorageView();

  auto insert_index = static_cast<SizeType<A>>(
      std::distance(ConstIterator<A>(storage_view.data), pos));
  SizeType<A> insert_end_index = insert_index + insert_count;
  SizeType<A> new_size = storage_view.size + insert_count;

  if (new_size > storage_view.capacity) {
    AllocationTransaction<A> allocation_tx(GetAllocator());
    ConstructionTransaction<A> construction_tx(GetAllocator());
    ConstructionTransaction<A> move_construction_tx(GetAllocator());

    IteratorValueAdapter<A, MoveIterator<A>> move_values(
        MoveIterator<A>(storage_view.data));

    SizeType<A> requested_capacity =
        ComputeCapacity(storage_view.capacity, new_size);
    Pointer<A> new_data = allocation_tx.Allocate(requested_capacity);

    construction_tx.Construct(new_data + insert_index, values, insert_count);

    move_construction_tx.Construct(new_data, move_values, insert_index);

    ConstructElements<A>(GetAllocator(), new_data + insert_end_index,
                         move_values, storage_view.size - insert_index);

    DestroyAdapter<A>::DestroyElements(GetAllocator(), storage_view.data,
                                       storage_view.size);

    std::move(construction_tx).Commit();
    std::move(move_construction_tx).Commit();
    DeallocateIfAllocated();
    SetAllocation(std::move(allocation_tx).Release());

    SetAllocatedSize(new_size);
    return Iterator<A>(new_data + insert_index);
  } else {
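    // In-place insert: trailing elements are either move-constructed into
    // previously uninitialized slots or shifted back by move assignment, and
    // the resulting gap is then filled from `values`.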
    SizeType<A> move_construction_destination_index =
        (std::max)(insert_end_index, storage_view.size);

    ConstructionTransaction<A> move_construction_tx(GetAllocator());

    IteratorValueAdapter<A, MoveIterator<A>> move_construction_values(
        MoveIterator<A>(storage_view.data +
                        (move_construction_destination_index - insert_count)));
    absl::Span<ValueType<A>> move_construction = {
        storage_view.data + move_construction_destination_index,
        new_size - move_construction_destination_index};

    Pointer<A> move_assignment_values = storage_view.data + insert_index;
    absl::Span<ValueType<A>> move_assignment = {
        storage_view.data + insert_end_index,
        move_construction_destination_index - insert_end_index};

    absl::Span<ValueType<A>> insert_assignment = {move_assignment_values,
                                                  move_construction.size()};

    absl::Span<ValueType<A>> insert_construction = {
        insert_assignment.data() + insert_assignment.size(),
        insert_count - insert_assignment.size()};

    move_construction_tx.Construct(move_construction.data(),
                                   move_construction_values,
                                   move_construction.size());

    for (Pointer<A>
             destination = move_assignment.data() + move_assignment.size(),
             last_destination = move_assignment.data(),
             source = move_assignment_values + move_assignment.size();
         ;) {
      --destination;
      --source;
      if (destination < last_destination) break;
      *destination = std::move(*source);
    }

    AssignElements<A>(insert_assignment.data(), values,
                      insert_assignment.size());

    ConstructElements<A>(GetAllocator(), insert_construction.data(), values,
                         insert_construction.size());

    std::move(move_construction_tx).Commit();

    AddSize(insert_count);
    return Iterator<A>(storage_view.data + insert_index);
  }
}

template <typename T, size_t N, typename A>
template <typename... Args>
auto Storage<T, N, A>::EmplaceBack(Args&&... args) -> Reference<A> {
  StorageView<A> storage_view = MakeStorageView();
  const SizeType<A> n = storage_view.size;
  if (ABSL_PREDICT_TRUE(n != storage_view.capacity)) {
    // Fast path; new element fits.
    Pointer<A> last_ptr = storage_view.data + n;
    AllocatorTraits<A>::construct(GetAllocator(), last_ptr,
                                  std::forward<Args>(args)...);
    AddSize(1);
    return *last_ptr;
  }
  // TODO(b/173712035): Annotate with musttail attribute to prevent regression.
  return EmplaceBackSlow(std::forward<Args>(args)...);
}

template <typename T, size_t N, typename A>
template <typename... Args>
auto Storage<T, N, A>::EmplaceBackSlow(Args&&... args) -> Reference<A> {
  StorageView<A> storage_view = MakeStorageView();
  AllocationTransaction<A> allocation_tx(GetAllocator());
  IteratorValueAdapter<A, MoveIterator<A>> move_values(
      MoveIterator<A>(storage_view.data));
  SizeType<A> requested_capacity = NextCapacity(storage_view.capacity);
  Pointer<A> construct_data = allocation_tx.Allocate(requested_capacity);
  Pointer<A> last_ptr = construct_data + storage_view.size;

  // Construct new element.
  AllocatorTraits<A>::construct(GetAllocator(), last_ptr,
                                std::forward<Args>(args)...);
  // Move elements from old backing store to new backing store.
  ABSL_INTERNAL_TRY {
    ConstructElements<A>(GetAllocator(), allocation_tx.GetData(), move_values,
                         storage_view.size);
  }
  ABSL_INTERNAL_CATCH_ANY {
    AllocatorTraits<A>::destroy(GetAllocator(), last_ptr);
    ABSL_INTERNAL_RETHROW;
  }
  // Destroy elements in old backing store.
  DestroyAdapter<A>::DestroyElements(GetAllocator(), storage_view.data,
                                     storage_view.size);

  DeallocateIfAllocated();
  SetAllocation(std::move(allocation_tx).Release());
  SetIsAllocated();
  AddSize(1);
  return *last_ptr;
}

template <typename T, size_t N, typename A>
auto Storage<T, N, A>::Erase(ConstIterator<A> from, ConstIterator<A> to)
    -> Iterator<A> {
  StorageView<A> storage_view = MakeStorageView();

  auto erase_size = static_cast<SizeType<A>>(std::distance(from, to));
  auto erase_index = static_cast<SizeType<A>>(
      std::distance(ConstIterator<A>(storage_view.data), from));
  SizeType<A> erase_end_index = erase_index + erase_size;

  IteratorValueAdapter<A, MoveIterator<A>> move_values(
      MoveIterator<A>(storage_view.data + erase_end_index));

  AssignElements<A>(storage_view.data + erase_index, move_values,
                    storage_view.size - erase_end_index);

  DestroyAdapter<A>::DestroyElements(
      GetAllocator(), storage_view.data + (storage_view.size - erase_size),
      erase_size);

  SubtractSize(erase_size);
  return Iterator<A>(storage_view.data + erase_index);
}

template <typename T, size_t N, typename A>
auto Storage<T, N, A>::Reserve(SizeType<A> requested_capacity) -> void {
  StorageView<A> storage_view = MakeStorageView();

  if (ABSL_PREDICT_FALSE(requested_capacity <= storage_view.capacity)) return;

  AllocationTransaction<A> allocation_tx(GetAllocator());

  IteratorValueAdapter<A, MoveIterator<A>> move_values(
      MoveIterator<A>(storage_view.data));

  SizeType<A> new_requested_capacity =
      ComputeCapacity(storage_view.capacity, requested_capacity);
  Pointer<A> new_data = allocation_tx.Allocate(new_requested_capacity);

  ConstructElements<A>(GetAllocator(), new_data, move_values,
                       storage_view.size);

  DestroyAdapter<A>::DestroyElements(GetAllocator(), storage_view.data,
                                     storage_view.size);

  DeallocateIfAllocated();
  SetAllocation(std::move(allocation_tx).Release());
  SetIsAllocated();
}

template <typename T, size_t N, typename A>
auto Storage<T, N, A>::ShrinkToFit() -> void {
  // May only be called on allocated instances!
  ABSL_HARDENING_ASSERT(GetIsAllocated());

  StorageView<A> storage_view{GetAllocatedData(), GetSize(),
                              GetAllocatedCapacity()};

  if (ABSL_PREDICT_FALSE(storage_view.size == storage_view.capacity)) return;

  AllocationTransaction<A> allocation_tx(GetAllocator());

  IteratorValueAdapter<A, MoveIterator<A>> move_values(
      MoveIterator<A>(storage_view.data));

  Pointer<A> construct_data;
  if (storage_view.size > GetInlinedCapacity()) {
    SizeType<A> requested_capacity = storage_view.size;
    construct_data = allocation_tx.Allocate(requested_capacity);
    if (allocation_tx.GetCapacity() >= storage_view.capacity) {
      // Already using the smallest available heap allocation.
      return;
    }
  } else {
    construct_data = GetInlinedData();
  }

  ABSL_INTERNAL_TRY {
    ConstructElements<A>(GetAllocator(), construct_data, move_values,
                         storage_view.size);
  }
  ABSL_INTERNAL_CATCH_ANY {
    SetAllocation({storage_view.data, storage_view.capacity});
    ABSL_INTERNAL_RETHROW;
  }

  DestroyAdapter<A>::DestroyElements(GetAllocator(), storage_view.data,
                                     storage_view.size);

  MallocAdapter<A>::Deallocate(GetAllocator(), storage_view.data,
                               storage_view.capacity);

  if (allocation_tx.DidAllocate()) {
    SetAllocation(std::move(allocation_tx).Release());
  } else {
    UnsetIsAllocated();
  }
}

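// Swaps two storages: if both are heap-allocated the allocations are simply
// exchanged; if both are inline the elements are swapped according to
// `SwapInlinedElementsPolicy`; otherwise the inline elements are moved into
// the other side's inline buffer and the heap allocation changes hands.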
template <typename T, size_t N, typename A>
auto Storage<T, N, A>::Swap(Storage* other_storage_ptr) -> void {
  using std::swap;
  ABSL_HARDENING_ASSERT(this != other_storage_ptr);

  if (GetIsAllocated() && other_storage_ptr->GetIsAllocated()) {
    swap(data_.allocated, other_storage_ptr->data_.allocated);
  } else if (!GetIsAllocated() && !other_storage_ptr->GetIsAllocated()) {
    SwapInlinedElements(SwapInlinedElementsPolicy{}, other_storage_ptr);
  } else {
    Storage* allocated_ptr = this;
    Storage* inlined_ptr = other_storage_ptr;
    if (!allocated_ptr->GetIsAllocated()) swap(allocated_ptr, inlined_ptr);

    StorageView<A> allocated_storage_view{
        allocated_ptr->GetAllocatedData(), allocated_ptr->GetSize(),
        allocated_ptr->GetAllocatedCapacity()};

    IteratorValueAdapter<A, MoveIterator<A>> move_values(
        MoveIterator<A>(inlined_ptr->GetInlinedData()));

    ABSL_INTERNAL_TRY {
      ConstructElements<A>(inlined_ptr->GetAllocator(),
                           allocated_ptr->GetInlinedData(), move_values,
                           inlined_ptr->GetSize());
    }
    ABSL_INTERNAL_CATCH_ANY {
      allocated_ptr->SetAllocation(Allocation<A>{
          allocated_storage_view.data, allocated_storage_view.capacity});
      ABSL_INTERNAL_RETHROW;
    }

    DestroyAdapter<A>::DestroyElements(inlined_ptr->GetAllocator(),
                                       inlined_ptr->GetInlinedData(),
                                       inlined_ptr->GetSize());

    inlined_ptr->SetAllocation(Allocation<A>{allocated_storage_view.data,
                                             allocated_storage_view.capacity});
  }

  swap(GetSizeAndIsAllocated(), other_storage_ptr->GetSizeAndIsAllocated());
  swap(GetAllocator(), other_storage_ptr->GetAllocator());
}

template <typename T, size_t N, typename A>
void Storage<T, N, A>::SwapN(ElementwiseSwapPolicy, Storage* other,
                             SizeType<A> n) {
  std::swap_ranges(GetInlinedData(), GetInlinedData() + n,
                   other->GetInlinedData());
}

template <typename T, size_t N, typename A>
void Storage<T, N, A>::SwapN(ElementwiseConstructPolicy, Storage* other,
                             SizeType<A> n) {
  Pointer<A> a = GetInlinedData();
  Pointer<A> b = other->GetInlinedData();
  // see note on allocators in `SwapInlinedElements`.
  A& allocator_a = GetAllocator();
  A& allocator_b = other->GetAllocator();
  for (SizeType<A> i = 0; i < n; ++i, ++a, ++b) {
    ValueType<A> tmp(std::move(*a));

    AllocatorTraits<A>::destroy(allocator_a, a);
    AllocatorTraits<A>::construct(allocator_b, a, std::move(*b));

    AllocatorTraits<A>::destroy(allocator_b, b);
    AllocatorTraits<A>::construct(allocator_a, b, std::move(tmp));
  }
}

template <typename T, size_t N, typename A>
void Storage<T, N, A>::SwapInlinedElements(MemcpyPolicy, Storage* other) {
  Data tmp = data_;
  data_ = other->data_;
  other->data_ = tmp;
}

template <typename T, size_t N, typename A>
template <typename NotMemcpyPolicy>
void Storage<T, N, A>::SwapInlinedElements(NotMemcpyPolicy policy,
                                           Storage* other) {
  // Note: `destroy` needs to use the pre-swap allocator while `construct`
  // needs the post-swap allocator. The allocators themselves are swapped
  // later on, outside of `SwapInlinedElements`.
  Storage* small_ptr = this;
  Storage* large_ptr = other;
  if (small_ptr->GetSize() > large_ptr->GetSize()) {
    std::swap(small_ptr, large_ptr);
  }

  auto small_size = small_ptr->GetSize();
  auto diff = large_ptr->GetSize() - small_size;
  SwapN(policy, other, small_size);

  IteratorValueAdapter<A, MoveIterator<A>> move_values(
      MoveIterator<A>(large_ptr->GetInlinedData() + small_size));

  ConstructElements<A>(large_ptr->GetAllocator(),
                       small_ptr->GetInlinedData() + small_size, move_values,
                       diff);

  DestroyAdapter<A>::DestroyElements(large_ptr->GetAllocator(),
                                     large_ptr->GetInlinedData() + small_size,
                                     diff);
}

// End ignore "array-bounds"
#if !defined(__clang__) && defined(__GNUC__)
#pragma GCC diagnostic pop
#endif

}  // namespace inlined_vector_internal
ABSL_NAMESPACE_END
}  // namespace absl

#endif  // ABSL_CONTAINER_INTERNAL_INLINED_VECTOR_H_