• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 // Protocol Buffers - Google's data interchange format
2 // Copyright 2008 Google Inc.  All rights reserved.
3 //
4 // Use of this source code is governed by a BSD-style
5 // license that can be found in the LICENSE file or at
6 // https://developers.google.com/open-source/licenses/bsd
7 
8 // Author: kenton@google.com (Kenton Varda)
9 //  Based on original Protocol Buffers design by
10 //  Sanjay Ghemawat, Jeff Dean, and others.
11 //
12 // RepeatedField and RepeatedPtrField are used by generated protocol message
13 // classes to manipulate repeated fields.  These classes are very similar to
14 // STL's vector, but include a number of optimizations found to be useful
15 // specifically in the case of Protocol Buffers.  RepeatedPtrField is
16 // particularly different from STL vector as it manages ownership of the
17 // pointers that it contains.
18 //
19 // This header covers RepeatedField.
20 
21 #ifndef GOOGLE_PROTOBUF_REPEATED_FIELD_H__
22 #define GOOGLE_PROTOBUF_REPEATED_FIELD_H__
23 
24 #include <algorithm>
25 #include <cstddef>
26 #include <cstdint>
27 #include <cstring>
28 #include <iterator>
29 #include <limits>
30 #include <memory>
31 #include <type_traits>
32 #include <utility>
33 
34 #include "absl/base/attributes.h"
35 #include "absl/base/dynamic_annotations.h"
36 #include "absl/base/optimization.h"
37 #include "absl/log/absl_check.h"
38 #include "absl/meta/type_traits.h"
39 #include "absl/strings/cord.h"
40 #include "google/protobuf/arena.h"
41 #include "google/protobuf/generated_enum_util.h"
42 #include "google/protobuf/internal_visibility.h"
43 #include "google/protobuf/message_lite.h"
44 #include "google/protobuf/port.h"
45 #include "google/protobuf/repeated_ptr_field.h"
46 
47 // Must be included last.
48 #include "google/protobuf/port_def.inc"
49 
50 #ifdef SWIG
51 #error "You cannot SWIG proto headers"
52 #endif
53 
54 namespace google {
55 namespace protobuf {
56 
57 class Message;
58 class UnknownField;  // For the allowlist
59 
60 namespace internal {
61 
62 template <typename T, int kHeapRepHeaderSize>
RepeatedFieldLowerClampLimit()63 constexpr int RepeatedFieldLowerClampLimit() {
64   // The header is padded to be at least `sizeof(T)` when it would be smaller
65   // otherwise.
66   static_assert(sizeof(T) <= kHeapRepHeaderSize, "");
67   // We want to pad the minimum size to be a power of two bytes, including the
68   // header.
69   // The first allocation is kHeapRepHeaderSize bytes worth of elements for a
70   // total of 2*kHeapRepHeaderSize bytes. For an 8-byte header, we allocate 8
71   // bool, 2 ints, or 1 int64.
72   return kHeapRepHeaderSize / sizeof(T);
73 }
74 
75 // kRepeatedFieldUpperClampLimit is the lowest signed integer value that
76 // overflows when multiplied by 2 (which is undefined behavior). Sizes above
77 // this will clamp to the maximum int value instead of following exponential
78 // growth when growing a repeated field.
79 #if defined(__cpp_inline_variables)
80 inline constexpr int kRepeatedFieldUpperClampLimit =
81 #else
82 constexpr int kRepeatedFieldUpperClampLimit =
83 #endif
84     (std::numeric_limits<int>::max() / 2) + 1;
85 
86 template <typename Element>
87 class RepeatedIterator;
88 
89 // Sentinel base class.
90 struct RepeatedFieldBase {};
91 
92 // We can't skip the destructor for, e.g., arena allocated RepeatedField<Cord>.
93 template <typename Element,
94           bool Trivial = Arena::is_destructor_skippable<Element>::value>
95 struct RepeatedFieldDestructorSkippableBase : RepeatedFieldBase {};
96 
97 template <typename Element>
98 struct RepeatedFieldDestructorSkippableBase<Element, true> : RepeatedFieldBase {
99   using DestructorSkippable_ = void;
100 };
101 
102 template <size_t kMinSize>
103 struct HeapRep {
104   // Avoid 'implicitly deleted dtor' warnings on certain compilers.
105   ~HeapRep() = delete;
106 
107   void* elements() { return this + 1; }
108 
109   // Align to 8 as sanitizers are picky on the alignment of containers to start
110   // at 8 byte offsets even when compiling for 32 bit platforms.
111   union {
112     alignas(8) Arena* arena;
113     // We pad the header to be at least `sizeof(Element)` so that we have
114     // power-of-two sized allocations, which enables Arena optimizations.
115     char padding[kMinSize];
116   };
117 };
118 
119 // We use small object optimization (SOO) to store elements inline when possible
120 // for small repeated fields. We do so in order to avoid memory indirections.
121 // Note that SOO is disabled on 32-bit platforms due to alignment limitations.
122 
123 // SOO data is stored in the same space as the size/capacity ints.
124 enum { kSooCapacityBytes = 2 * sizeof(int) };
125 
126 // Arena/elements pointers are aligned to at least kSooPtrAlignment bytes so we
127 // can use the lower bits to encode whether we're in SOO mode and if so, the
128 // SOO size. NOTE: we also tried using all kSooPtrMask bits to encode SOO size
129 // and use all ones as a sentinel value for non-SOO mode, but that was slower in
130 // benchmarks/loadtests.
131 enum { kSooPtrAlignment = 8 };
132 // The mask for the size bits in SOO mode, and also a sentinel value indicating
133 // that the field is not in SOO mode.
134 enum { kSooPtrMask = ~(kSooPtrAlignment - 1) };
135 // This bit is 0 when in SOO mode and 1 when in non-SOO mode.
136 enum { kNotSooBit = kSooPtrAlignment >> 1 };
137 // These bits are used to encode the size when in SOO mode (sizes are 0-3).
138 enum { kSooSizeMask = kNotSooBit - 1 };
139 
140 // The number of elements that can be stored in the SOO rep. On 64-bit
141 // platforms, this is 1 for int64_t, 2 for int32_t, 3 for bool, and 0 for
142 // absl::Cord. We return 0 to disable SOO on 32-bit platforms.
143 constexpr int SooCapacityElements(size_t element_size) {
144   if (sizeof(void*) < 8) return 0;
145   return std::min<int>(kSooCapacityBytes / element_size, kSooSizeMask);
146 }
147 
148 struct LongSooRep {
149   // Returns char* rather than void* so callers can do pointer arithmetic.
150   char* elements() const {
151     auto ret = reinterpret_cast<char*>(elements_int & kSooPtrMask);
152     ABSL_DCHECK_NE(ret, nullptr);
153     return ret;
154   }
155 
156   uintptr_t elements_int;
157   int size;
158   int capacity;
159 };
160 struct ShortSooRep {
161   constexpr ShortSooRep() = default;
162   explicit ShortSooRep(Arena* arena)
163       : arena_and_size(reinterpret_cast<uintptr_t>(arena)) {
164     ABSL_DCHECK_EQ(size(), 0);
165   }
166 
167   int size() const { return arena_and_size & kSooSizeMask; }
168   bool is_soo() const { return (arena_and_size & kNotSooBit) == 0; }
169 
170   uintptr_t arena_and_size = 0;
171   union {
172     char data[kSooCapacityBytes];
173     // NOTE: in some language versions, we can't have a constexpr constructor
174     // if we don't initialize all fields, but `data` doesn't need to be
175     // initialized so initialize an empty dummy variable instead.
176     std::true_type dummy = {};
177   };
178 };
179 struct SooRep {
180   constexpr SooRep() : short_rep() {}
181   explicit SooRep(Arena* arena) : short_rep(arena) {}
182 
183   bool is_soo() const {
184     static_assert(sizeof(LongSooRep) == sizeof(ShortSooRep), "");
185     static_assert(offsetof(SooRep, long_rep) == offsetof(SooRep, short_rep),
186                   "");
187     static_assert(offsetof(LongSooRep, elements_int) ==
188                       offsetof(ShortSooRep, arena_and_size),
189                   "");
190     return short_rep.is_soo();
191   }
192   Arena* soo_arena() const {
193     ABSL_DCHECK(is_soo());
194     return reinterpret_cast<Arena*>(short_rep.arena_and_size & kSooPtrMask);
195   }
196   int size(bool is_soo) const {
197     ABSL_DCHECK_EQ(is_soo, this->is_soo());
198 #if !defined(__clang__) && defined(__GNUC__)
199 #pragma GCC diagnostic push
200 #pragma GCC diagnostic ignored "-Wmaybe-uninitialized"
201 #endif
202     return is_soo ? short_rep.size() : long_rep.size;
203 #if !defined(__clang__) && defined(__GNUC__)
204 #pragma GCC diagnostic pop
205 #endif
206   }
207   void set_size(bool is_soo, int size) {
208     ABSL_DCHECK_EQ(is_soo, this->is_soo());
209     if (is_soo) {
210       ABSL_DCHECK_LE(size, kSooSizeMask);
211       short_rep.arena_and_size &= kSooPtrMask;
212       short_rep.arena_and_size |= size;
213     } else {
214       long_rep.size = size;
215     }
216   }
217   // Initializes the SooRep in non-SOO mode with the given capacity and heap
218   // allocation.
219   void set_non_soo(bool was_soo, int capacity, void* elements) {
220     ABSL_DCHECK_EQ(was_soo, is_soo());
221     ABSL_DCHECK_NE(elements, nullptr);
222     ABSL_DCHECK_EQ(reinterpret_cast<uintptr_t>(elements) % kSooPtrAlignment,
223                    uintptr_t{0});
224     if (was_soo) long_rep.size = short_rep.size();
225     long_rep.capacity = capacity;
226     long_rep.elements_int = reinterpret_cast<uintptr_t>(elements) | kNotSooBit;
227   }
228 
229   union {
230     LongSooRep long_rep;
231     ShortSooRep short_rep;
232   };
233 };
234 
235 }  // namespace internal
236 
237 // RepeatedField is used to represent repeated fields of a primitive type (in
238 // other words, everything except strings and nested Messages).  Most users will
239 // not ever use a RepeatedField directly; they will use the get-by-index,
240 // set-by-index, and add accessors that are generated for all repeated fields.
241 // Actually, in addition to primitive types, we use RepeatedField for repeated
242 // Cords, because the Cord class is in fact just a reference-counted pointer.
243 // We have to specialize several methods in the Cord case to get the memory
244 // management right; e.g. swapping when appropriate, etc.
245 template <typename Element>
246 class RepeatedField final
247     : private internal::RepeatedFieldDestructorSkippableBase<Element> {
248   static_assert(
249       alignof(Arena) >= alignof(Element),
250       "We only support types that have an alignment smaller than Arena");
251   static_assert(!std::is_const<Element>::value,
252                 "We do not support const value types.");
253   static_assert(!std::is_volatile<Element>::value,
254                 "We do not support volatile value types.");
255   static_assert(!std::is_pointer<Element>::value,
256                 "We do not support pointer value types.");
257   static_assert(!std::is_reference<Element>::value,
258                 "We do not support reference value types.");
259   static constexpr PROTOBUF_ALWAYS_INLINE void StaticValidityCheck() {
260     static_assert(
261         absl::disjunction<internal::is_supported_integral_type<Element>,
262                           internal::is_supported_floating_point_type<Element>,
263                           std::is_same<absl::Cord, Element>,
264                           std::is_same<UnknownField, Element>,
265                           is_proto_enum<Element>>::value,
266         "We only support non-string scalars in RepeatedField.");
267   }
268 
269  public:
270   using value_type = Element;
271   using size_type = int;
272   using difference_type = ptrdiff_t;
273   using reference = Element&;
274   using const_reference = const Element&;
275   using pointer = Element*;
276   using const_pointer = const Element*;
277   using iterator = internal::RepeatedIterator<Element>;
278   using const_iterator = internal::RepeatedIterator<const Element>;
279   using reverse_iterator = std::reverse_iterator<iterator>;
280   using const_reverse_iterator = std::reverse_iterator<const_iterator>;
281 
282   constexpr RepeatedField();
283   RepeatedField(const RepeatedField& rhs) : RepeatedField(nullptr, rhs) {}
284 
285   // TODO: make this constructor private
286   explicit RepeatedField(Arena* arena);
287 
288   template <typename Iter,
289             typename = typename std::enable_if<std::is_constructible<
290                 Element, decltype(*std::declval<Iter>())>::value>::type>
291   RepeatedField(Iter begin, Iter end);
292 
293   // Arena enabled constructors: for internal use only.
294   RepeatedField(internal::InternalVisibility, Arena* arena)
295       : RepeatedField(arena) {}
296   RepeatedField(internal::InternalVisibility, Arena* arena,
297                 const RepeatedField& rhs)
298       : RepeatedField(arena, rhs) {}
299 
300   RepeatedField& operator=(const RepeatedField& other)
301       ABSL_ATTRIBUTE_LIFETIME_BOUND;
302 
303   RepeatedField(RepeatedField&& rhs) noexcept
304       : RepeatedField(nullptr, std::move(rhs)) {}
305   RepeatedField& operator=(RepeatedField&& other) noexcept
306       ABSL_ATTRIBUTE_LIFETIME_BOUND;
307 
308   ~RepeatedField();
309 
310   bool empty() const;
311   int size() const;
312 
313   const_reference Get(int index) const ABSL_ATTRIBUTE_LIFETIME_BOUND;
314   pointer Mutable(int index) ABSL_ATTRIBUTE_LIFETIME_BOUND;
315 
316   const_reference operator[](int index) const ABSL_ATTRIBUTE_LIFETIME_BOUND {
317     return Get(index);
318   }
319   reference operator[](int index) ABSL_ATTRIBUTE_LIFETIME_BOUND {
320     return *Mutable(index);
321   }
322 
323   const_reference at(int index) const ABSL_ATTRIBUTE_LIFETIME_BOUND;
324   reference at(int index) ABSL_ATTRIBUTE_LIFETIME_BOUND;
325 
326   void Set(int index, const Element& value);
327   void Add(Element value);
328 
329   // Appends a new element and returns a pointer to it.
330   // The new element is uninitialized if |Element| is a POD type.
331   pointer Add() ABSL_ATTRIBUTE_LIFETIME_BOUND;
332   // Appends elements in the range [begin, end) after reserving
333   // the appropriate number of elements.
334   template <typename Iter>
335   void Add(Iter begin, Iter end);
336 
337   // Removes the last element in the array.
338   void RemoveLast();
339 
340   // Extracts elements with indices in "[start .. start+num-1]".
341   // Copies them into "elements[0 .. num-1]" if "elements" is not nullptr.
342   // Caution: also moves elements with indices [start+num ..].
343   // Calling this routine inside a loop can cause quadratic behavior.
344   void ExtractSubrange(int start, int num, Element* elements);
345 
346   ABSL_ATTRIBUTE_REINITIALIZES void Clear();
347 
348   // Appends the elements from `other` after this instance.
349   // The end result length will be `other.size() + this->size()`.
350   void MergeFrom(const RepeatedField& other);
351 
352   // Replaces the contents with a copy of the elements from `other`.
353   ABSL_ATTRIBUTE_REINITIALIZES void CopyFrom(const RepeatedField& other);
354 
355   // Replaces the contents with RepeatedField(begin, end).
356   template <typename Iter>
357   ABSL_ATTRIBUTE_REINITIALIZES void Assign(Iter begin, Iter end);
358 
359   // Reserves space to expand the field to at least the given size.  If the
360   // array is grown, it will always be at least doubled in size.
361   void Reserve(int new_size);
362 
363   // Resizes the RepeatedField to a new, smaller size.  This is O(1).
364   // Except for RepeatedField<Cord>, for which it is O(size-new_size).
365   void Truncate(int new_size);
366 
367   void AddAlreadyReserved(Element value);
368   int Capacity() const;
369 
370   // Adds `n` elements to this instance asserting there is enough capacity.
371   // The added elements are uninitialized if `Element` is trivial.
372   pointer AddAlreadyReserved() ABSL_ATTRIBUTE_LIFETIME_BOUND;
373   pointer AddNAlreadyReserved(int n) ABSL_ATTRIBUTE_LIFETIME_BOUND;
374 
375   // Like STL resize.  Uses value to fill appended elements.
376   // Like Truncate() if new_size <= size(), otherwise this is
377   // O(new_size - size()).
378   void Resize(size_type new_size, const Element& value);
379 
380   // Gets the underlying array.  This pointer is possibly invalidated by
381   // any add or remove operation.
382   pointer mutable_data() ABSL_ATTRIBUTE_LIFETIME_BOUND;
383   const_pointer data() const ABSL_ATTRIBUTE_LIFETIME_BOUND;
384 
385   // Swaps entire contents with "other". If they are separate arenas, then
386   // copies data between each other.
387   void Swap(RepeatedField* other);
388 
389   // Swaps two elements.
390   void SwapElements(int index1, int index2);
391 
392   iterator begin() ABSL_ATTRIBUTE_LIFETIME_BOUND;
393   const_iterator begin() const ABSL_ATTRIBUTE_LIFETIME_BOUND;
394   const_iterator cbegin() const ABSL_ATTRIBUTE_LIFETIME_BOUND;
395   iterator end() ABSL_ATTRIBUTE_LIFETIME_BOUND;
396   const_iterator end() const ABSL_ATTRIBUTE_LIFETIME_BOUND;
397   const_iterator cend() const ABSL_ATTRIBUTE_LIFETIME_BOUND;
398 
399   // Reverse iterator support
400   reverse_iterator rbegin() ABSL_ATTRIBUTE_LIFETIME_BOUND {
401     return reverse_iterator(end());
402   }
403   const_reverse_iterator rbegin() const ABSL_ATTRIBUTE_LIFETIME_BOUND {
404     return const_reverse_iterator(end());
405   }
406   reverse_iterator rend() ABSL_ATTRIBUTE_LIFETIME_BOUND {
407     return reverse_iterator(begin());
408   }
409   const_reverse_iterator rend() const ABSL_ATTRIBUTE_LIFETIME_BOUND {
410     return const_reverse_iterator(begin());
411   }
412 
413   // Returns the number of bytes used by the repeated field, excluding
414   // sizeof(*this)
415   size_t SpaceUsedExcludingSelfLong() const;
416 
417   int SpaceUsedExcludingSelf() const {
418     return internal::ToIntSize(SpaceUsedExcludingSelfLong());
419   }
420 
421   // Removes the element referenced by position.
422   //
423   // Returns an iterator to the element immediately following the removed
424   // element.
425   //
426   // Invalidates all iterators at or after the removed element, including end().
427   iterator erase(const_iterator position) ABSL_ATTRIBUTE_LIFETIME_BOUND;
428 
429   // Removes the elements in the range [first, last).
430   //
431   // Returns an iterator to the element immediately following the removed range.
432   //
433   // Invalidates all iterators at or after the removed range, including end().
434   iterator erase(const_iterator first,
435                  const_iterator last) ABSL_ATTRIBUTE_LIFETIME_BOUND;
436 
437   // Gets the Arena on which this RepeatedField stores its elements.
438   // Note: this can be inaccurate for split default fields so we make this
439   // function non-const.
440   inline Arena* GetArena() { return GetArena(is_soo()); }
441 
442   // For internal use only.
443   //
444   // This is public due to it being called by generated code.
445   inline void InternalSwap(RepeatedField* other);
446 
447   static constexpr size_t InternalGetArenaOffset(internal::InternalVisibility) {
448     return PROTOBUF_FIELD_OFFSET(RepeatedField, soo_rep_) +
449            PROTOBUF_FIELD_OFFSET(internal::ShortSooRep, arena_and_size);
450   }
451 
452  private:
453   using InternalArenaConstructable_ = void;
454   // We use std::max in order to share template instantiations between
455   // different element types.
456   using HeapRep = internal::HeapRep<std::max<size_t>(sizeof(Element), 8)>;
457 
458   template <typename T>
459   friend class Arena::InternalHelper;
460 
461   friend class Arena;
462 
463   static constexpr int kSooCapacityElements =
464       internal::SooCapacityElements(sizeof(Element));
465 
466   static constexpr int kInitialSize = 0;
467   static PROTOBUF_CONSTEXPR const size_t kHeapRepHeaderSize = sizeof(HeapRep);
468 
469   RepeatedField(Arena* arena, const RepeatedField& rhs);
470   RepeatedField(Arena* arena, RepeatedField&& rhs);
471 
472   inline Arena* GetArena(bool is_soo) const {
473     return is_soo ? soo_rep_.soo_arena() : heap_rep()->arena;
474   }
475 
476   bool is_soo() const { return soo_rep_.is_soo(); }
477   int size(bool is_soo) const { return soo_rep_.size(is_soo); }
478   int Capacity(bool is_soo) const {
479 #if !defined(__clang__) && defined(__GNUC__)
480 #pragma GCC diagnostic push
481 #pragma GCC diagnostic ignored "-Wmaybe-uninitialized"
482 #endif
483     return is_soo ? kSooCapacityElements : soo_rep_.long_rep.capacity;
484 #if !defined(__clang__) && defined(__GNUC__)
485 #pragma GCC diagnostic pop
486 #endif
487   }
488   void set_size(bool is_soo, int size) {
489     ABSL_DCHECK_LE(size, Capacity(is_soo));
490     soo_rep_.set_size(is_soo, size);
491   }
492 
493   // Swaps entire contents with "other". Should be called only if the caller can
494   // guarantee that both repeated fields are on the same arena or are on the
495   // heap. Swapping between different arenas is disallowed and caught by a
496   // ABSL_DCHECK (see API docs for details).
497   void UnsafeArenaSwap(RepeatedField* other);
498 
499   // Copy constructs `n` instances in place into the array `dst`.
500   // This function is identical to `std::uninitialized_copy_n(src, n, dst)`
501   // except that we explicit declare the memory to not be aliased, which will
502   // result in `memcpy` code generation instead of `memmove` for trivial types.
503   static inline void UninitializedCopyN(const Element* PROTOBUF_RESTRICT src,
504                                         int n, Element* PROTOBUF_RESTRICT dst) {
505     std::uninitialized_copy_n(src, n, dst);
506   }
507 
508   // Copy constructs `[begin, end)` instances in place into the array `dst`.
509   // See above `UninitializedCopyN()` function comments for more information.
510   template <typename Iter>
511   static inline void UninitializedCopy(Iter begin, Iter end,
512                                        Element* PROTOBUF_RESTRICT dst) {
513     std::uninitialized_copy(begin, end, dst);
514   }
515 
516   // Destroys all elements in [begin, end).
517   // This function does nothing if `Element` is trivial.
518   static void Destroy(const Element* begin, const Element* end) {
519     if (!std::is_trivial<Element>::value) {
520       std::for_each(begin, end, [&](const Element& e) { e.~Element(); });
521     }
522   }
523 
524   template <typename Iter>
525   void AddForwardIterator(Iter begin, Iter end);
526 
527   template <typename Iter>
528   void AddInputIterator(Iter begin, Iter end);
529 
530   // Reserves space to expand the field to at least the given size.
531   // If the array is grown, it will always be at least doubled in size.
532   // If `annotate_size` is true (the default), then this function will annotate
533   // the old container from `old_size` to `Capacity()` (unpoison memory)
534   // directly before it is being released, and annotate the new container from
535   // `Capacity()` to `old_size` (poison unused memory).
536   void Grow(bool was_soo, int old_size, int new_size);
537   void GrowNoAnnotate(bool was_soo, int old_size, int new_size);
538 
539   // Annotates a change in size of this instance. This function should be called
540   // with (capacity, old_size) after new memory has been allocated and filled
541   // from previous memory, and UnpoisonBuffer() should be called right before
542   // (previously annotated) memory is released.
543   void AnnotateSize(int old_size, int new_size) const {
544     if (old_size != new_size) {
545       ABSL_ATTRIBUTE_UNUSED const bool is_soo = this->is_soo();
546       ABSL_ATTRIBUTE_UNUSED const Element* elem = unsafe_elements(is_soo);
547       ABSL_ANNOTATE_CONTIGUOUS_CONTAINER(elem, elem + Capacity(is_soo),
548                                          elem + old_size, elem + new_size);
549       if (new_size < old_size) {
550         ABSL_ANNOTATE_MEMORY_IS_UNINITIALIZED(
551             elem + new_size, (old_size - new_size) * sizeof(Element));
552       }
553     }
554   }
555 
556   // Unpoisons the memory buffer.
557   void UnpoisonBuffer() const {
558     AnnotateSize(size(), Capacity());
559     if (is_soo()) {
560       // We need to manually unpoison the SOO buffer because in reflection for
561       // split repeated fields, we poison the whole SOO buffer even when we
562       // don't actually use the whole SOO buffer (e.g. for RepeatedField<bool>).
563       PROTOBUF_UNPOISON_MEMORY_REGION(soo_rep_.short_rep.data,
564                                       sizeof(soo_rep_.short_rep.data));
565     }
566   }
567 
568   // Replaces size with new_size and returns the previous value of
569   // size. This function is intended to be the only place where
570   // size is modified, with the exception of `AddInputIterator()`
571   // where the size of added items is not known in advance.
572   inline int ExchangeCurrentSize(bool is_soo, int new_size) {
573     const int prev_size = size(is_soo);
574     AnnotateSize(prev_size, new_size);
575     set_size(is_soo, new_size);
576     return prev_size;
577   }
578 
579   // Returns a pointer to elements array.
580   // pre-condition: Capacity() > 0.
581   Element* elements(bool is_soo) {
582     ABSL_DCHECK_GT(Capacity(is_soo), 0);
583     return unsafe_elements(is_soo);
584   }
585   const Element* elements(bool is_soo) const {
586     return const_cast<RepeatedField*>(this)->elements(is_soo);
587   }
588 
589   // Returns a pointer to elements array if it exists; otherwise an invalid
590   // pointer is returned. This only happens for empty repeated fields, where you
591   // can't dereference this pointer anyway (it's empty).
592   Element* unsafe_elements(bool is_soo) {
593     return is_soo ? reinterpret_cast<Element*>(soo_rep_.short_rep.data)
594                   : reinterpret_cast<Element*>(soo_rep_.long_rep.elements());
595   }
596   const Element* unsafe_elements(bool is_soo) const {
597     return const_cast<RepeatedField*>(this)->unsafe_elements(is_soo);
598   }
599 
600   // Returns a pointer to the HeapRep struct.
601   // pre-condition: the HeapRep must have been allocated, ie !is_soo().
602   HeapRep* heap_rep() const {
603     ABSL_DCHECK(!is_soo());
604     return reinterpret_cast<HeapRep*>(soo_rep_.long_rep.elements() -
605                                       kHeapRepHeaderSize);
606   }
607 
608   // Internal helper to delete all elements and deallocate the storage.
609   template <bool in_destructor = false>
610   void InternalDeallocate() {
611     ABSL_DCHECK(!is_soo());
612     const size_t bytes = Capacity(false) * sizeof(Element) + kHeapRepHeaderSize;
613     if (heap_rep()->arena == nullptr) {
614       internal::SizedDelete(heap_rep(), bytes);
615     } else if (!in_destructor) {
616       // If we are in the destructor, we might be being destroyed as part of
617       // the arena teardown. We can't try and return blocks to the arena then.
618       heap_rep()->arena->ReturnArrayMemory(heap_rep(), bytes);
619     }
620   }
621 
622   // A note on the representation here (see also comment below for
623   // RepeatedPtrFieldBase's struct HeapRep):
624   //
625   // We maintain the same sizeof(RepeatedField) as before we added arena support
626   // so that we do not degrade performance by bloating memory usage. Directly
627   // adding an arena_ element to RepeatedField is quite costly. By using
628   // indirection in this way, we keep the same size when the RepeatedField is
629   // empty (common case), and add only an 8-byte header to the elements array
630   // when non-empty. We make sure to place the size fields directly in the
631   // RepeatedField class to avoid costly cache misses due to the indirection.
632   internal::SooRep soo_rep_{};
633 };
634 
635 // implementation ====================================================
636 
637 template <typename Element>
638 constexpr RepeatedField<Element>::RepeatedField() {
639   StaticValidityCheck();
640 #ifdef __cpp_lib_is_constant_evaluated
641   if (!std::is_constant_evaluated()) {
642     AnnotateSize(kSooCapacityElements, 0);
643   }
644 #endif  // __cpp_lib_is_constant_evaluated
645 }
646 
647 template <typename Element>
648 inline RepeatedField<Element>::RepeatedField(Arena* arena) : soo_rep_(arena) {
649   StaticValidityCheck();
650   AnnotateSize(kSooCapacityElements, 0);
651 }
652 
653 template <typename Element>
654 inline RepeatedField<Element>::RepeatedField(Arena* arena,
655                                              const RepeatedField& rhs)
656     : soo_rep_(arena) {
657   StaticValidityCheck();
658   AnnotateSize(kSooCapacityElements, 0);
659   const bool rhs_is_soo = rhs.is_soo();
660   if (auto size = rhs.size(rhs_is_soo)) {
661     bool is_soo = true;
662     if (size > kSooCapacityElements) {
663       Grow(is_soo, 0, size);
664       is_soo = false;
665     }
666     ExchangeCurrentSize(is_soo, size);
667     UninitializedCopyN(rhs.elements(rhs_is_soo), size, unsafe_elements(is_soo));
668   }
669 }
670 
671 template <typename Element>
672 template <typename Iter, typename>
673 RepeatedField<Element>::RepeatedField(Iter begin, Iter end) {
674   StaticValidityCheck();
675   AnnotateSize(kSooCapacityElements, 0);
676   Add(begin, end);
677 }
678 
679 template <typename Element>
680 RepeatedField<Element>::~RepeatedField() {
681   StaticValidityCheck();
682   const bool is_soo = this->is_soo();
683 #ifndef NDEBUG
684   // Try to trigger segfault / asan failure in non-opt builds if arena_
685   // lifetime has ended before the destructor.
686   auto arena = GetArena(is_soo);
687   if (arena) (void)arena->SpaceAllocated();
688 #endif
689   const int size = this->size(is_soo);
690   if (size > 0) {
691     Element* elem = unsafe_elements(is_soo);
692     Destroy(elem, elem + size);
693   }
694   UnpoisonBuffer();
695   if (!is_soo) InternalDeallocate<true>();
696 }
697 
698 template <typename Element>
699 inline RepeatedField<Element>& RepeatedField<Element>::operator=(
700     const RepeatedField& other) ABSL_ATTRIBUTE_LIFETIME_BOUND {
701   if (this != &other) CopyFrom(other);
702   return *this;
703 }
704 
705 template <typename Element>
706 inline RepeatedField<Element>::RepeatedField(Arena* arena, RepeatedField&& rhs)
707     : RepeatedField(arena) {
708   if (internal::CanMoveWithInternalSwap(arena, rhs.GetArena())) {
709     InternalSwap(&rhs);
710   } else {
711     // We don't just call Swap(&rhs) here because it would perform 3 copies if
712     // rhs is on a different arena.
713     CopyFrom(rhs);
714   }
715 }
716 
717 template <typename Element>
718 inline RepeatedField<Element>& RepeatedField<Element>::operator=(
719     RepeatedField&& other) noexcept ABSL_ATTRIBUTE_LIFETIME_BOUND {
720   // We don't just call Swap(&other) here because it would perform 3 copies if
721   // the two fields are on different arenas.
722   if (this != &other) {
723     if (internal::CanMoveWithInternalSwap(GetArena(), other.GetArena())) {
724       InternalSwap(&other);
725     } else {
726       CopyFrom(other);
727     }
728   }
729   return *this;
730 }
731 
732 template <typename Element>
733 inline bool RepeatedField<Element>::empty() const {
734   return size() == 0;
735 }
736 
737 template <typename Element>
738 inline int RepeatedField<Element>::size() const {
739   return size(is_soo());
740 }
741 
742 template <typename Element>
743 inline int RepeatedField<Element>::Capacity() const {
744   return Capacity(is_soo());
745 }
746 
747 template <typename Element>
748 inline void RepeatedField<Element>::AddAlreadyReserved(Element value) {
749   const bool is_soo = this->is_soo();
750   const int old_size = size(is_soo);
751   ABSL_DCHECK_LT(old_size, Capacity(is_soo));
752   void* p = elements(is_soo) + ExchangeCurrentSize(is_soo, old_size + 1);
753   ::new (p) Element(std::move(value));
754 }
755 
756 template <typename Element>
757 inline Element* RepeatedField<Element>::AddAlreadyReserved()
758     ABSL_ATTRIBUTE_LIFETIME_BOUND {
759   const bool is_soo = this->is_soo();
760   const int old_size = size(is_soo);
761   ABSL_DCHECK_LT(old_size, Capacity(is_soo));
762   // new (p) <TrivialType> compiles into nothing: this is intentional as this
763   // function is documented to return uninitialized data for trivial types.
764   void* p = elements(is_soo) + ExchangeCurrentSize(is_soo, old_size + 1);
765   return ::new (p) Element;
766 }
767 
768 template <typename Element>
769 inline Element* RepeatedField<Element>::AddNAlreadyReserved(int n)
770     ABSL_ATTRIBUTE_LIFETIME_BOUND {
771   const bool is_soo = this->is_soo();
772   const int old_size = size(is_soo);
773   ABSL_ATTRIBUTE_UNUSED const int capacity = Capacity(is_soo);
774   ABSL_DCHECK_GE(capacity - old_size, n) << capacity << ", " << old_size;
775   Element* p =
776       unsafe_elements(is_soo) + ExchangeCurrentSize(is_soo, old_size + n);
777   for (Element *begin = p, *end = p + n; begin != end; ++begin) {
778     new (static_cast<void*>(begin)) Element;
779   }
780   return p;
781 }
782 
783 template <typename Element>
784 inline void RepeatedField<Element>::Resize(int new_size, const Element& value) {
785   ABSL_DCHECK_GE(new_size, 0);
786   bool is_soo = this->is_soo();
787   const int old_size = size(is_soo);
788   if (new_size > old_size) {
789     if (new_size > Capacity(is_soo)) {
790       Grow(is_soo, old_size, new_size);
791       is_soo = false;
792     }
793     Element* elem = elements(is_soo);
794     Element* first = elem + ExchangeCurrentSize(is_soo, new_size);
795     std::uninitialized_fill(first, elem + new_size, value);
796   } else if (new_size < old_size) {
797     Element* elem = unsafe_elements(is_soo);
798     Destroy(elem + new_size, elem + old_size);
799     ExchangeCurrentSize(is_soo, new_size);
800   }
801 }
802 
803 template <typename Element>
804 inline const Element& RepeatedField<Element>::Get(int index) const
805     ABSL_ATTRIBUTE_LIFETIME_BOUND {
806   ABSL_DCHECK_GE(index, 0);
807   ABSL_DCHECK_LT(index, size());
808   return elements(is_soo())[index];
809 }
810 
811 template <typename Element>
812 inline const Element& RepeatedField<Element>::at(int index) const
813     ABSL_ATTRIBUTE_LIFETIME_BOUND {
814   ABSL_CHECK_GE(index, 0);
815   ABSL_CHECK_LT(index, size());
816   return elements(is_soo())[index];
817 }
818 
819 template <typename Element>
820 inline Element& RepeatedField<Element>::at(int index)
821     ABSL_ATTRIBUTE_LIFETIME_BOUND {
822   ABSL_CHECK_GE(index, 0);
823   ABSL_CHECK_LT(index, size());
824   return elements(is_soo())[index];
825 }
826 
827 template <typename Element>
828 inline Element* RepeatedField<Element>::Mutable(int index)
829     ABSL_ATTRIBUTE_LIFETIME_BOUND {
830   ABSL_DCHECK_GE(index, 0);
831   ABSL_DCHECK_LT(index, size());
832   return &elements(is_soo())[index];
833 }
834 
835 template <typename Element>
836 inline void RepeatedField<Element>::Set(int index, const Element& value) {
837   *Mutable(index) = value;
838 }
839 
840 template <typename Element>
841 inline void RepeatedField<Element>::Add(Element value) {
842   bool is_soo = this->is_soo();
843   const int old_size = size(is_soo);
844   int capacity = Capacity(is_soo);
845   Element* elem = unsafe_elements(is_soo);
846   if (ABSL_PREDICT_FALSE(old_size == capacity)) {
847     Grow(is_soo, old_size, old_size + 1);
848     is_soo = false;
849     capacity = Capacity(is_soo);
850     elem = unsafe_elements(is_soo);
851   }
852   int new_size = old_size + 1;
853   void* p = elem + ExchangeCurrentSize(is_soo, new_size);
854   ::new (p) Element(std::move(value));
855 
856   // The below helps the compiler optimize dense loops.
857   // Note: we can't call functions in PROTOBUF_ASSUME so use local variables.
858   ABSL_ATTRIBUTE_UNUSED const bool final_is_soo = this->is_soo();
859   PROTOBUF_ASSUME(is_soo == final_is_soo);
860   ABSL_ATTRIBUTE_UNUSED const int final_size = size(is_soo);
861   PROTOBUF_ASSUME(new_size == final_size);
862   ABSL_ATTRIBUTE_UNUSED Element* const final_elements = unsafe_elements(is_soo);
863   PROTOBUF_ASSUME(elem == final_elements);
864   ABSL_ATTRIBUTE_UNUSED const int final_capacity = Capacity(is_soo);
865   PROTOBUF_ASSUME(capacity == final_capacity);
866 }
867 
868 template <typename Element>
869 inline Element* RepeatedField<Element>::Add() ABSL_ATTRIBUTE_LIFETIME_BOUND {
870   bool is_soo = this->is_soo();
871   const int old_size = size(is_soo);
872   if (ABSL_PREDICT_FALSE(old_size == Capacity())) {
873     Grow(is_soo, old_size, old_size + 1);
874     is_soo = false;
875   }
876   void* p = unsafe_elements(is_soo) + ExchangeCurrentSize(is_soo, old_size + 1);
877   return ::new (p) Element;
878 }
879 
880 template <typename Element>
881 template <typename Iter>
882 inline void RepeatedField<Element>::AddForwardIterator(Iter begin, Iter end) {
883   bool is_soo = this->is_soo();
884   const int old_size = size(is_soo);
885   int capacity = Capacity(is_soo);
886   Element* elem = unsafe_elements(is_soo);
887   int new_size = old_size + static_cast<int>(std::distance(begin, end));
888   if (ABSL_PREDICT_FALSE(new_size > capacity)) {
889     Grow(is_soo, old_size, new_size);
890     is_soo = false;
891     elem = unsafe_elements(is_soo);
892     capacity = Capacity(is_soo);
893   }
894   UninitializedCopy(begin, end, elem + ExchangeCurrentSize(is_soo, new_size));
895 
896   // The below helps the compiler optimize dense loops.
897   // Note: we can't call functions in PROTOBUF_ASSUME so use local variables.
898   ABSL_ATTRIBUTE_UNUSED const bool final_is_soo = this->is_soo();
899   PROTOBUF_ASSUME(is_soo == final_is_soo);
900   ABSL_ATTRIBUTE_UNUSED const int final_size = size(is_soo);
901   PROTOBUF_ASSUME(new_size == final_size);
902   ABSL_ATTRIBUTE_UNUSED Element* const final_elements = unsafe_elements(is_soo);
903   PROTOBUF_ASSUME(elem == final_elements);
904   ABSL_ATTRIBUTE_UNUSED const int final_capacity = Capacity(is_soo);
905   PROTOBUF_ASSUME(capacity == final_capacity);
906 }
907 
908 template <typename Element>
909 template <typename Iter>
910 inline void RepeatedField<Element>::AddInputIterator(Iter begin, Iter end) {
911   bool is_soo = this->is_soo();
912   int size = this->size(is_soo);
913   int capacity = Capacity(is_soo);
914   Element* elem = unsafe_elements(is_soo);
915   Element* first = elem + size;
916   Element* last = elem + capacity;
917   UnpoisonBuffer();
918 
919   while (begin != end) {
920     if (ABSL_PREDICT_FALSE(first == last)) {
921       size = first - elem;
922       GrowNoAnnotate(is_soo, size, size + 1);
923       is_soo = false;
924       elem = unsafe_elements(is_soo);
925       capacity = Capacity(is_soo);
926       first = elem + size;
927       last = elem + capacity;
928     }
929     ::new (static_cast<void*>(first)) Element(*begin);
930     ++begin;
931     ++first;
932   }
933 
934   const int new_size = first - elem;
935   set_size(is_soo, new_size);
936   AnnotateSize(capacity, new_size);
937 }
938 
939 template <typename Element>
940 template <typename Iter>
941 inline void RepeatedField<Element>::Add(Iter begin, Iter end) {
942   if (std::is_base_of<
943           std::forward_iterator_tag,
944           typename std::iterator_traits<Iter>::iterator_category>::value) {
945     AddForwardIterator(begin, end);
946   } else {
947     AddInputIterator(begin, end);
948   }
949 }
950 
951 template <typename Element>
952 inline void RepeatedField<Element>::RemoveLast() {
953   const bool is_soo = this->is_soo();
954   const int old_size = size(is_soo);
955   ABSL_DCHECK_GT(old_size, 0);
956   elements(is_soo)[old_size - 1].~Element();
957   ExchangeCurrentSize(is_soo, old_size - 1);
958 }
959 
960 template <typename Element>
961 void RepeatedField<Element>::ExtractSubrange(int start, int num,
962                                              Element* elements) {
963   ABSL_DCHECK_GE(start, 0);
964   ABSL_DCHECK_GE(num, 0);
965   const bool is_soo = this->is_soo();
966   const int old_size = size(is_soo);
967   ABSL_DCHECK_LE(start + num, old_size);
968   Element* elem = unsafe_elements(is_soo);
969 
970   // Save the values of the removed elements if requested.
971   if (elements != nullptr) {
972     for (int i = 0; i < num; ++i) elements[i] = std::move(elem[i + start]);
973   }
974 
975   // Slide remaining elements down to fill the gap.
976   if (num > 0) {
977     for (int i = start + num; i < old_size; ++i)
978       elem[i - num] = std::move(elem[i]);
979     Truncate(old_size - num);
980   }
981 }
982 
983 template <typename Element>
984 inline void RepeatedField<Element>::Clear() {
985   const bool is_soo = this->is_soo();
986   Element* elem = unsafe_elements(is_soo);
987   Destroy(elem, elem + size(is_soo));
988   ExchangeCurrentSize(is_soo, 0);
989 }
990 
991 template <typename Element>
992 inline void RepeatedField<Element>::MergeFrom(const RepeatedField& other) {
993   ABSL_DCHECK_NE(&other, this);
994   const bool other_is_soo = other.is_soo();
995   if (auto other_size = other.size(other_is_soo)) {
996     const int old_size = size();
997     Reserve(old_size + other_size);
998     const bool is_soo = this->is_soo();
999     Element* dst =
1000         elements(is_soo) + ExchangeCurrentSize(is_soo, old_size + other_size);
1001     UninitializedCopyN(other.elements(other_is_soo), other_size, dst);
1002   }
1003 }
1004 
1005 template <typename Element>
1006 inline void RepeatedField<Element>::CopyFrom(const RepeatedField& other) {
1007   if (&other == this) return;
1008   Clear();
1009   MergeFrom(other);
1010 }
1011 
1012 template <typename Element>
1013 template <typename Iter>
1014 inline void RepeatedField<Element>::Assign(Iter begin, Iter end) {
1015   Clear();
1016   Add(begin, end);
1017 }
1018 
1019 template <typename Element>
1020 inline typename RepeatedField<Element>::iterator RepeatedField<Element>::erase(
1021     const_iterator position) ABSL_ATTRIBUTE_LIFETIME_BOUND {
1022   return erase(position, position + 1);
1023 }
1024 
1025 template <typename Element>
1026 inline typename RepeatedField<Element>::iterator RepeatedField<Element>::erase(
1027     const_iterator first, const_iterator last) ABSL_ATTRIBUTE_LIFETIME_BOUND {
1028   size_type first_offset = first - cbegin();
1029   if (first != last) {
1030     Truncate(std::copy(last, cend(), begin() + first_offset) - cbegin());
1031   }
1032   return begin() + first_offset;
1033 }
1034 
1035 template <typename Element>
1036 inline Element* RepeatedField<Element>::mutable_data()
1037     ABSL_ATTRIBUTE_LIFETIME_BOUND {
1038   return unsafe_elements(is_soo());
1039 }
1040 
1041 template <typename Element>
1042 inline const Element* RepeatedField<Element>::data() const
1043     ABSL_ATTRIBUTE_LIFETIME_BOUND {
1044   return unsafe_elements(is_soo());
1045 }
1046 
1047 template <typename Element>
1048 inline void RepeatedField<Element>::InternalSwap(
1049     RepeatedField* PROTOBUF_RESTRICT other) {
1050   ABSL_DCHECK(this != other);
1051 
1052   // We need to unpoison during the swap in case we're in SOO mode.
1053   UnpoisonBuffer();
1054   other->UnpoisonBuffer();
1055 
1056   internal::memswap<sizeof(internal::SooRep)>(
1057       reinterpret_cast<char*>(&this->soo_rep_),
1058       reinterpret_cast<char*>(&other->soo_rep_));
1059 
1060   AnnotateSize(Capacity(), size());
1061   other->AnnotateSize(other->Capacity(), other->size());
1062 }
1063 
1064 template <typename Element>
1065 void RepeatedField<Element>::Swap(RepeatedField* other) {
1066   if (this == other) return;
1067   Arena* arena = GetArena();
1068   Arena* other_arena = other->GetArena();
1069   if (internal::CanUseInternalSwap(arena, other_arena)) {
1070     InternalSwap(other);
1071   } else {
1072     RepeatedField<Element> temp(other_arena);
1073     temp.MergeFrom(*this);
1074     CopyFrom(*other);
1075     other->UnsafeArenaSwap(&temp);
1076   }
1077 }
1078 
1079 template <typename Element>
1080 void RepeatedField<Element>::UnsafeArenaSwap(RepeatedField* other) {
1081   if (this == other) return;
1082   ABSL_DCHECK_EQ(GetArena(), other->GetArena());
1083   InternalSwap(other);
1084 }
1085 
1086 template <typename Element>
1087 void RepeatedField<Element>::SwapElements(int index1, int index2) {
1088   Element* elem = elements(is_soo());
1089   using std::swap;  // enable ADL with fallback
1090   swap(elem[index1], elem[index2]);
1091 }
1092 
1093 template <typename Element>
1094 inline typename RepeatedField<Element>::iterator RepeatedField<Element>::begin()
1095     ABSL_ATTRIBUTE_LIFETIME_BOUND {
1096   return iterator(unsafe_elements(is_soo()));
1097 }
1098 template <typename Element>
1099 inline typename RepeatedField<Element>::const_iterator
1100 RepeatedField<Element>::begin() const ABSL_ATTRIBUTE_LIFETIME_BOUND {
1101   return const_iterator(unsafe_elements(is_soo()));
1102 }
1103 template <typename Element>
1104 inline typename RepeatedField<Element>::const_iterator
1105 RepeatedField<Element>::cbegin() const ABSL_ATTRIBUTE_LIFETIME_BOUND {
1106   return const_iterator(unsafe_elements(is_soo()));
1107 }
1108 template <typename Element>
1109 inline typename RepeatedField<Element>::iterator RepeatedField<Element>::end()
1110     ABSL_ATTRIBUTE_LIFETIME_BOUND {
1111   const bool is_soo = this->is_soo();
1112   return iterator(unsafe_elements(is_soo) + size(is_soo));
1113 }
1114 template <typename Element>
1115 inline typename RepeatedField<Element>::const_iterator
1116 RepeatedField<Element>::end() const ABSL_ATTRIBUTE_LIFETIME_BOUND {
1117   const bool is_soo = this->is_soo();
1118   return const_iterator(unsafe_elements(is_soo) + size(is_soo));
1119 }
1120 template <typename Element>
1121 inline typename RepeatedField<Element>::const_iterator
1122 RepeatedField<Element>::cend() const ABSL_ATTRIBUTE_LIFETIME_BOUND {
1123   const bool is_soo = this->is_soo();
1124   return const_iterator(unsafe_elements(is_soo) + size(is_soo));
1125 }
1126 
1127 template <typename Element>
1128 inline size_t RepeatedField<Element>::SpaceUsedExcludingSelfLong() const {
1129   const int capacity = Capacity();
1130   return capacity > kSooCapacityElements
1131              ? capacity * sizeof(Element) + kHeapRepHeaderSize
1132              : 0;
1133 }
1134 
1135 namespace internal {
1136 // Returns the new size for a reserved field based on its 'capacity' and the
1137 // requested 'new_size'. The result is clamped to the closed interval:
1138 //   [internal::kMinRepeatedFieldAllocationSize,
1139 //    std::numeric_limits<int>::max()]
1140 // Requires: new_size > capacity
1141 template <typename T, int kHeapRepHeaderSize>
1142 inline int CalculateReserveSize(int capacity, int new_size) {
1143   constexpr int lower_limit =
1144       RepeatedFieldLowerClampLimit<T, kHeapRepHeaderSize>();
1145   if (new_size < lower_limit) {
1146     // Clamp to smallest allowed size.
1147     return lower_limit;
1148   }
1149   constexpr int kMaxSizeBeforeClamp =
1150       (std::numeric_limits<int>::max() - kHeapRepHeaderSize) / 2;
1151   if (PROTOBUF_PREDICT_FALSE(capacity > kMaxSizeBeforeClamp)) {
1152     return std::numeric_limits<int>::max();
1153   }
1154   constexpr int kSooCapacityElements = SooCapacityElements(sizeof(T));
1155   if (kSooCapacityElements > 0 && kSooCapacityElements < lower_limit) {
1156     // In this case, we need to set capacity to 0 here to ensure power-of-two
1157     // sized allocations.
1158     if (capacity < lower_limit) capacity = 0;
1159   } else {
1160     ABSL_DCHECK(capacity == 0 || capacity >= lower_limit)
1161         << capacity << " " << lower_limit;
1162   }
1163   // We want to double the number of bytes, not the number of elements, to try
1164   // to stay within power-of-two allocations.
1165   // The allocation has kHeapRepHeaderSize + sizeof(T) * capacity.
1166   int doubled_size = 2 * capacity + kHeapRepHeaderSize / sizeof(T);
1167   return std::max(doubled_size, new_size);
1168 }
1169 }  // namespace internal
1170 
1171 template <typename Element>
1172 void RepeatedField<Element>::Reserve(int new_size) {
1173   const bool was_soo = is_soo();
1174   if (ABSL_PREDICT_FALSE(new_size > Capacity(was_soo))) {
1175     Grow(was_soo, size(was_soo), new_size);
1176   }
1177 }
1178 
1179 // Avoid inlining of Reserve(): new, copy, and delete[] lead to a significant
1180 // amount of code bloat.
1181 template <typename Element>
1182 PROTOBUF_NOINLINE void RepeatedField<Element>::GrowNoAnnotate(bool was_soo,
1183                                                               int old_size,
1184                                                               int new_size) {
1185   const int old_capacity = Capacity(was_soo);
1186   ABSL_DCHECK_GT(new_size, old_capacity);
1187   HeapRep* new_rep;
1188   Arena* arena = GetArena();
1189 
1190   new_size = internal::CalculateReserveSize<Element, kHeapRepHeaderSize>(
1191       old_capacity, new_size);
1192 
1193   ABSL_DCHECK_LE(static_cast<size_t>(new_size),
1194                  (std::numeric_limits<size_t>::max() - kHeapRepHeaderSize) /
1195                      sizeof(Element))
1196       << "Requested size is too large to fit into size_t.";
1197   size_t bytes =
1198       kHeapRepHeaderSize + sizeof(Element) * static_cast<size_t>(new_size);
1199   if (arena == nullptr) {
1200     ABSL_DCHECK_LE((bytes - kHeapRepHeaderSize) / sizeof(Element),
1201                    static_cast<size_t>(std::numeric_limits<int>::max()))
1202         << "Requested size is too large to fit element count into int.";
1203     internal::SizedPtr res = internal::AllocateAtLeast(bytes);
1204     size_t num_available =
1205         std::min((res.n - kHeapRepHeaderSize) / sizeof(Element),
1206                  static_cast<size_t>(std::numeric_limits<int>::max()));
1207     new_size = static_cast<int>(num_available);
1208     new_rep = static_cast<HeapRep*>(res.p);
1209   } else {
1210     new_rep =
1211         reinterpret_cast<HeapRep*>(Arena::CreateArray<char>(arena, bytes));
1212   }
1213   new_rep->arena = arena;
1214 
1215   if (old_size > 0) {
1216     Element* pnew = static_cast<Element*>(new_rep->elements());
1217     Element* pold = elements(was_soo);
1218     // TODO: add absl::is_trivially_relocatable<Element>
1219     if (std::is_trivial<Element>::value) {
1220       memcpy(static_cast<void*>(pnew), pold, old_size * sizeof(Element));
1221     } else {
1222       for (Element* end = pnew + old_size; pnew != end; ++pnew, ++pold) {
1223         ::new (static_cast<void*>(pnew)) Element(std::move(*pold));
1224         pold->~Element();
1225       }
1226     }
1227   }
1228   if (!was_soo) InternalDeallocate();
1229 
1230   soo_rep_.set_non_soo(was_soo, new_size, new_rep->elements());
1231 }
1232 
1233 // Ideally we would be able to use:
1234 //   template <bool annotate_size = true>
1235 //   void Grow();
1236 // However, as explained in b/266411038#comment9, this causes issues
1237 // in shared libraries for Youtube (and possibly elsewhere).
1238 template <typename Element>
1239 PROTOBUF_NOINLINE void RepeatedField<Element>::Grow(bool was_soo, int old_size,
1240                                                     int new_size) {
1241   UnpoisonBuffer();
1242   GrowNoAnnotate(was_soo, old_size, new_size);
1243   AnnotateSize(Capacity(), old_size);
1244 }
1245 
1246 template <typename Element>
1247 inline void RepeatedField<Element>::Truncate(int new_size) {
1248   const bool is_soo = this->is_soo();
1249   const int old_size = size(is_soo);
1250   ABSL_DCHECK_LE(new_size, old_size);
1251   if (new_size < old_size) {
1252     Element* elem = unsafe_elements(is_soo);
1253     Destroy(elem + new_size, elem + old_size);
1254     ExchangeCurrentSize(is_soo, new_size);
1255   }
1256 }
1257 
1258 template <>
1259 PROTOBUF_EXPORT size_t
1260 RepeatedField<absl::Cord>::SpaceUsedExcludingSelfLong() const;
1261 
1262 
1263 // -------------------------------------------------------------------
1264 
1265 // Iterators and helper functions that follow the spirit of the STL
1266 // std::back_insert_iterator and std::back_inserter but are tailor-made
1267 // for RepeatedField and RepeatedPtrField. Typical usage would be:
1268 //
1269 //   std::copy(some_sequence.begin(), some_sequence.end(),
1270 //             RepeatedFieldBackInserter(proto.mutable_sequence()));
1271 //
1272 // Ported by johannes from util/gtl/proto-array-iterators.h
1273 
1274 namespace internal {
1275 
1276 // STL-like iterator implementation for RepeatedField.  You should not
1277 // refer to this class directly; use RepeatedField<T>::iterator instead.
1278 //
1279 // Note: All of the iterator operators *must* be inlined to avoid performance
1280 // regressions.  This is caused by the extern template declarations below (which
1281 // are required because of the RepeatedField extern template declarations).  If
1282 // any of these functions aren't explicitly inlined (e.g. defined in the class),
1283 // the compiler isn't allowed to inline them.
1284 template <typename Element>
1285 class RepeatedIterator {
1286  private:
1287   using traits =
1288       std::iterator_traits<typename std::remove_const<Element>::type*>;
1289 
1290  public:
1291   // Note: value_type is never cv-qualified.
1292   using value_type = typename traits::value_type;
1293   using difference_type = typename traits::difference_type;
1294   using pointer = Element*;
1295   using reference = Element&;
1296   using iterator_category = typename traits::iterator_category;
1297   using iterator_concept = typename IteratorConceptSupport<traits>::tag;
1298 
1299   constexpr RepeatedIterator() noexcept : it_(nullptr) {}
1300 
1301   // Allows "upcasting" from RepeatedIterator<T**> to
1302   // RepeatedIterator<const T*const*>.
1303   template <typename OtherElement,
1304             typename std::enable_if<std::is_convertible<
1305                 OtherElement*, pointer>::value>::type* = nullptr>
1306   constexpr RepeatedIterator(
1307       const RepeatedIterator<OtherElement>& other) noexcept
1308       : it_(other.it_) {}
1309 
1310   // dereferenceable
1311   constexpr reference operator*() const noexcept { return *it_; }
1312   constexpr pointer operator->() const noexcept { return it_; }
1313 
1314  private:
1315   // Helper alias to hide the internal type.
1316   using iterator = RepeatedIterator<Element>;
1317 
1318  public:
1319   // {inc,dec}rementable
1320   iterator& operator++() noexcept {
1321     ++it_;
1322     return *this;
1323   }
1324   iterator operator++(int) noexcept { return iterator(it_++); }
1325   iterator& operator--() noexcept {
1326     --it_;
1327     return *this;
1328   }
1329   iterator operator--(int) noexcept { return iterator(it_--); }
1330 
1331   // equality_comparable
1332   friend constexpr bool operator==(const iterator& x,
1333                                    const iterator& y) noexcept {
1334     return x.it_ == y.it_;
1335   }
1336   friend constexpr bool operator!=(const iterator& x,
1337                                    const iterator& y) noexcept {
1338     return x.it_ != y.it_;
1339   }
1340 
1341   // less_than_comparable
1342   friend constexpr bool operator<(const iterator& x,
1343                                   const iterator& y) noexcept {
1344     return x.it_ < y.it_;
1345   }
1346   friend constexpr bool operator<=(const iterator& x,
1347                                    const iterator& y) noexcept {
1348     return x.it_ <= y.it_;
1349   }
1350   friend constexpr bool operator>(const iterator& x,
1351                                   const iterator& y) noexcept {
1352     return x.it_ > y.it_;
1353   }
1354   friend constexpr bool operator>=(const iterator& x,
1355                                    const iterator& y) noexcept {
1356     return x.it_ >= y.it_;
1357   }
1358 
1359   // addable, subtractable
1360   iterator& operator+=(difference_type d) noexcept {
1361     it_ += d;
1362     return *this;
1363   }
1364   constexpr iterator operator+(difference_type d) const noexcept {
1365     return iterator(it_ + d);
1366   }
1367   friend constexpr iterator operator+(const difference_type d,
1368                                       iterator it) noexcept {
1369     return it + d;
1370   }
1371 
1372   iterator& operator-=(difference_type d) noexcept {
1373     it_ -= d;
1374     return *this;
1375   }
1376   iterator constexpr operator-(difference_type d) const noexcept {
1377     return iterator(it_ - d);
1378   }
1379 
1380   // indexable
1381   constexpr reference operator[](difference_type d) const noexcept {
1382     return it_[d];
1383   }
1384 
1385   // random access iterator
1386   friend constexpr difference_type operator-(iterator it1,
1387                                              iterator it2) noexcept {
1388     return it1.it_ - it2.it_;
1389   }
1390 
1391  private:
1392   template <typename OtherElement>
1393   friend class RepeatedIterator;
1394 
1395   // Allow construction from RepeatedField.
1396   friend class RepeatedField<value_type>;
1397   explicit RepeatedIterator(pointer it) noexcept : it_(it) {}
1398 
1399   // The internal iterator.
1400   pointer it_;
1401 };
1402 
1403 // A back inserter for RepeatedField objects.
1404 template <typename T>
1405 class RepeatedFieldBackInsertIterator {
1406  public:
1407   using iterator_category = std::output_iterator_tag;
1408   using value_type = T;
1409   using pointer = void;
1410   using reference = void;
1411   using difference_type = std::ptrdiff_t;
1412 
1413   explicit RepeatedFieldBackInsertIterator(
1414       RepeatedField<T>* const mutable_field)
1415       : field_(mutable_field) {}
1416   RepeatedFieldBackInsertIterator<T>& operator=(const T& value) {
1417     field_->Add(value);
1418     return *this;
1419   }
1420   RepeatedFieldBackInsertIterator<T>& operator*() { return *this; }
1421   RepeatedFieldBackInsertIterator<T>& operator++() { return *this; }
1422   RepeatedFieldBackInsertIterator<T>& operator++(int /* unused */) {
1423     return *this;
1424   }
1425 
1426  private:
1427   RepeatedField<T>* field_;
1428 };
1429 
1430 }  // namespace internal
1431 
1432 // Provides a back insert iterator for RepeatedField instances,
1433 // similar to std::back_inserter().
1434 template <typename T>
1435 internal::RepeatedFieldBackInsertIterator<T> RepeatedFieldBackInserter(
1436     RepeatedField<T>* const mutable_field) {
1437   return internal::RepeatedFieldBackInsertIterator<T>(mutable_field);
1438 }
1439 
1440 
1441 }  // namespace protobuf
1442 }  // namespace google
1443 
1444 #include "google/protobuf/port_undef.inc"
1445 
1446 #endif  // GOOGLE_PROTOBUF_REPEATED_FIELD_H__
1447