1 // Protocol Buffers - Google's data interchange format
2 // Copyright 2008 Google Inc.  All rights reserved.
3 //
4 // Use of this source code is governed by a BSD-style
5 // license that can be found in the LICENSE file or at
6 // https://developers.google.com/open-source/licenses/bsd
7 
8 // Author: kenton@google.com (Kenton Varda)
9 //  Based on original Protocol Buffers design by
10 //  Sanjay Ghemawat, Jeff Dean, and others.
11 //
12 // RepeatedField and RepeatedPtrField are used by generated protocol message
13 // classes to manipulate repeated fields.  These classes are very similar to
14 // STL's vector, but include a number of optimizations found to be useful
15 // specifically in the case of Protocol Buffers.  RepeatedPtrField is
16 // particularly different from STL vector as it manages ownership of the
17 // pointers that it contains.
18 //
19 // This header covers RepeatedPtrField.
20 
21 #ifndef GOOGLE_PROTOBUF_REPEATED_PTR_FIELD_H__
22 #define GOOGLE_PROTOBUF_REPEATED_PTR_FIELD_H__
23 
24 #include <algorithm>
25 #include <cstddef>
26 #include <cstdint>
27 #include <iterator>
28 #include <limits>
29 #include <new>
30 #include <string>
31 #include <type_traits>
32 #include <utility>
33 
34 #include "absl/base/attributes.h"
35 #include "absl/base/prefetch.h"
36 #include "absl/log/absl_check.h"
37 #include "absl/meta/type_traits.h"
38 #include "google/protobuf/arena.h"
39 #include "google/protobuf/internal_visibility.h"
40 #include "google/protobuf/message_lite.h"
41 #include "google/protobuf/port.h"
42 
43 // Must be included last.
44 #include "google/protobuf/port_def.inc"
45 
46 #ifdef SWIG
47 #error "You cannot SWIG proto headers"
48 #endif
49 
50 namespace google {
51 namespace protobuf {
52 
53 class Message;
54 class Reflection;
55 
56 template <typename T>
57 struct WeakRepeatedPtrField;
58 
59 namespace internal {
60 
61 class MergePartialFromCodedStreamHelper;
62 class SwapFieldHelper;
63 
64 
65 }  // namespace internal
66 
67 namespace internal {
68 template <typename It>
69 class RepeatedPtrIterator;
70 template <typename It, typename VoidPtr>
71 class RepeatedPtrOverPtrsIterator;
72 }  // namespace internal
73 
74 namespace internal {
75 
76 // Swaps two non-overlapping blocks of memory of size `N`
77 template <size_t N>
78 inline void memswap(char* PROTOBUF_RESTRICT a, char* PROTOBUF_RESTRICT b) {
79   // `PROTOBUF_RESTRICT` tells the compiler that the blocks do not overlap,
80   // which allows it to generate optimized code for swap_ranges.
81   std::swap_ranges(a, a + N, b);
82 }
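// For example, InternalSwap() below relies on this helper to swap all fields
// of two RepeatedPtrFieldBase objects except the trailing arena pointer:
//   memswap<ArenaOffsetHelper<RepeatedPtrFieldBase>::value>(
//       reinterpret_cast<char*>(this), reinterpret_cast<char*>(rhs));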
83 
84 template <typename T>
85 struct IsMovable
86     : std::integral_constant<bool, std::is_move_constructible<T>::value &&
87                                        std::is_move_assignable<T>::value> {};
88 
89 // A trait that tells the offset of `T::arena_`.
90 //
91 // Do not use this struct - it exists for internal use only.
92 template <typename T>
93 struct ArenaOffsetHelper {
94   static constexpr size_t value = offsetof(T, arena_);
95 };
96 
97 // Defined further below.
98 template <typename Type>
99 class GenericTypeHandler;
100 
101 // This is the common base class for RepeatedPtrFields.  It deals only in void*
102 // pointers.  Users should not use this interface directly.
103 //
104 // The methods of this interface correspond to the methods of RepeatedPtrField,
105 // but may have a template argument called TypeHandler.  Its signature is:
106 //   class TypeHandler {
107 //    public:
108 //     using Type = MyType;
109 //     using Movable = ...;
110 //
111 //     static Type*(*)(Arena*) GetNewFunc();
112 //     static Arena* GetArena(Type* value);
113 //
114 //     static Type* New(Arena* arena);
115 //     static Type* New(Arena* arena, Type&& value);
116 //     static Type* NewFromPrototype(const Type* prototype, Arena* arena);
117 //     static void Delete(Type*, Arena* arena);
118 //     static void Clear(Type*);
119 //     static void Merge(const Type& from, Type* to);
120 //
121 //     // Only needs to be implemented if SpaceUsedExcludingSelf() is called.
122 //     static int SpaceUsedLong(const Type&);
123 //   };
124 class PROTOBUF_EXPORT RepeatedPtrFieldBase {
125   template <typename TypeHandler>
126   using Value = typename TypeHandler::Type;
127 
128   static constexpr int kSSOCapacity = 1;
129 
130   using ElementFactory = void* (*)(Arena*);
131 
132  protected:
133   // We use the same TypeHandler for all Message types to deduplicate generated
134   // code.
135   template <typename TypeHandler>
136   using CommonHandler = typename std::conditional<
137       std::is_base_of<MessageLite, Value<TypeHandler>>::value,
138       GenericTypeHandler<MessageLite>, TypeHandler>::type;
139 
140   constexpr RepeatedPtrFieldBase()
141       : tagged_rep_or_elem_(nullptr),
142         current_size_(0),
143         capacity_proxy_(0),
144         arena_(nullptr) {}
145   explicit RepeatedPtrFieldBase(Arena* arena)
146       : tagged_rep_or_elem_(nullptr),
147         current_size_(0),
148         capacity_proxy_(0),
149         arena_(arena) {}
150 
151   RepeatedPtrFieldBase(const RepeatedPtrFieldBase&) = delete;
152   RepeatedPtrFieldBase& operator=(const RepeatedPtrFieldBase&) = delete;
153 
154   ~RepeatedPtrFieldBase() {
155 #ifndef NDEBUG
156     // Try to trigger segfault / asan failure in non-opt builds if arena_
157     // lifetime has ended before the destructor.
158     if (arena_) (void)arena_->SpaceAllocated();
159 #endif
160   }
161 
162   bool empty() const { return current_size_ == 0; }
163   int size() const { return current_size_; }
164   // Returns the size of the buffer with pointers to elements.
165   //
166   // Note:
167   //
168   //   * prefer `SizeAtCapacity()` to `size() == Capacity()`;
169   //   * prefer `AllocatedSizeAtCapacity()` to `allocated_size() == Capacity()`.
170   int Capacity() const { return capacity_proxy_ + kSSOCapacity; }
171 
172   template <typename TypeHandler>
173   const Value<TypeHandler>& at(int index) const {
174     ABSL_CHECK_GE(index, 0);
175     ABSL_CHECK_LT(index, current_size_);
176     return *cast<TypeHandler>(element_at(index));
177   }
178 
179   template <typename TypeHandler>
180   Value<TypeHandler>& at(int index) {
181     ABSL_CHECK_GE(index, 0);
182     ABSL_CHECK_LT(index, current_size_);
183     return *cast<TypeHandler>(element_at(index));
184   }
185 
186   template <typename TypeHandler>
187   Value<TypeHandler>* Mutable(int index) {
188     ABSL_DCHECK_GE(index, 0);
189     ABSL_DCHECK_LT(index, current_size_);
190     return cast<TypeHandler>(element_at(index));
191   }
192 
193   template <typename TypeHandler>
194   Value<TypeHandler>* Add() {
195     if (std::is_same<Value<TypeHandler>, std::string>{}) {
196       return cast<TypeHandler>(AddString());
197     }
198     return cast<TypeHandler>(AddMessageLite(TypeHandler::GetNewFunc()));
199   }
200 
201   template <
202       typename TypeHandler,
203       typename std::enable_if<TypeHandler::Movable::value>::type* = nullptr>
204   inline void Add(Value<TypeHandler>&& value) {
205     if (current_size_ < allocated_size()) {
206       *cast<TypeHandler>(element_at(ExchangeCurrentSize(current_size_ + 1))) =
207           std::move(value);
208       return;
209     }
210     MaybeExtend();
211     if (!using_sso()) ++rep()->allocated_size;
212     auto* result = TypeHandler::New(arena_, std::move(value));
213     element_at(ExchangeCurrentSize(current_size_ + 1)) = result;
214   }
215 
216   // Must be called from destructor.
217   //
218   // Pre-condition: NeedsDestroy() returns true.
219   template <typename TypeHandler>
220   void Destroy() {
221     ABSL_DCHECK(NeedsDestroy());
222 
223     // TODO: arena check is redundant once all `RepeatedPtrField`s
224     // with non-null arena are owned by the arena.
225     if (PROTOBUF_PREDICT_FALSE(arena_ != nullptr)) return;
226 
227     using H = CommonHandler<TypeHandler>;
228     int n = allocated_size();
229     void** elems = elements();
230     for (int i = 0; i < n; i++) {
231       Delete<H>(elems[i], nullptr);
232     }
233     if (!using_sso()) {
234       internal::SizedDelete(rep(),
235                             Capacity() * sizeof(elems[0]) + kRepHeaderSize);
236     }
237   }
238 
239   inline bool NeedsDestroy() const {
240     // Either there is an allocated element in SSO buffer or there is an
241     // allocated Rep.
242     return tagged_rep_or_elem_ != nullptr;
243   }
244   void DestroyProtos();
245 
246  public:
247   // The next few methods are public so that they can be called from generated
248   // code when implicit weak fields are used, but they should never be called by
249   // application code.
250 
251   template <typename TypeHandler>
252   const Value<TypeHandler>& Get(int index) const {
253     ABSL_DCHECK_GE(index, 0);
254     ABSL_DCHECK_LT(index, current_size_);
255     return *cast<TypeHandler>(element_at(index));
256   }
257 
258   // Creates and adds an element using the given prototype, without introducing
259   // a link-time dependency on the concrete message type.
260   //
261   // Pre-condition: prototype must not be nullptr.
262   MessageLite* AddMessage(const MessageLite* prototype);
263 
264   template <typename TypeHandler>
265   void Clear() {
266     const int n = current_size_;
267     ABSL_DCHECK_GE(n, 0);
268     if (n > 0) {
269       using H = CommonHandler<TypeHandler>;
270       ClearNonEmpty<H>();
271     }
272   }
273 
274   // Appends all message values from `from` to this instance.
275   template <typename T>
276   void MergeFrom(const RepeatedPtrFieldBase& from) {
277     static_assert(std::is_base_of<MessageLite, T>::value, "");
278 #ifdef __cpp_if_constexpr
279     if constexpr (!std::is_base_of<Message, T>::value) {
280       // For LITE objects we use the generic MergeFrom to save on binary size.
281       return MergeFrom<MessageLite>(from);
282     }
283 #endif
284     MergeFromConcreteMessage(from, Arena::CopyConstruct<T>);
285   }
286 
287   inline void InternalSwap(RepeatedPtrFieldBase* PROTOBUF_RESTRICT rhs) {
288     ABSL_DCHECK(this != rhs);
289 
290     // Swap all fields except arena pointer at once.
291     internal::memswap<ArenaOffsetHelper<RepeatedPtrFieldBase>::value>(
292         reinterpret_cast<char*>(this), reinterpret_cast<char*>(rhs));
293   }
294 
295   // Returns true if there are no preallocated elements in the array.
296   bool PrepareForParse() { return allocated_size() == current_size_; }
297 
298   // Similar to `AddAllocated` but faster.
299   //
300   // Pre-condition: PrepareForParse() is true.
301   void AddAllocatedForParse(void* value) {
302     ABSL_DCHECK(PrepareForParse());
303     if (PROTOBUF_PREDICT_FALSE(SizeAtCapacity())) {
304       *InternalExtend(1) = value;
305       ++rep()->allocated_size;
306     } else {
307       if (using_sso()) {
308         tagged_rep_or_elem_ = value;
309       } else {
310         rep()->elements[current_size_] = value;
311         ++rep()->allocated_size;
312       }
313     }
314     ExchangeCurrentSize(current_size_ + 1);
315   }
316 
317  protected:
318   template <typename TypeHandler>
319   void RemoveLast() {
320     ABSL_DCHECK_GT(current_size_, 0);
321     ExchangeCurrentSize(current_size_ - 1);
322     using H = CommonHandler<TypeHandler>;
323     H::Clear(cast<H>(element_at(current_size_)));
324   }
325 
326   template <typename TypeHandler>
327   void CopyFrom(const RepeatedPtrFieldBase& other) {
328     if (&other == this) return;
329     Clear<TypeHandler>();
330     if (other.empty()) return;
331     MergeFrom<typename TypeHandler::Type>(other);
332   }
333 
334   void CloseGap(int start, int num);
335 
336   void Reserve(int capacity);
337 
338   template <typename TypeHandler>
339   static inline Value<TypeHandler>* copy(const Value<TypeHandler>* value) {
340     using H = CommonHandler<TypeHandler>;
341     auto* new_value = H::NewFromPrototype(value, nullptr);
342     H::Merge(*value, new_value);
343     return cast<TypeHandler>(new_value);
344   }
345 
346   // Used for constructing iterators.
347   void* const* raw_data() const { return elements(); }
348   void** raw_mutable_data() { return elements(); }
349 
350   template <typename TypeHandler>
351   Value<TypeHandler>** mutable_data() {
352     // TODO:  Breaks C++ aliasing rules.  We should probably remove this
353     //   method entirely.
354     return reinterpret_cast<Value<TypeHandler>**>(raw_mutable_data());
355   }
356 
357   template <typename TypeHandler>
358   const Value<TypeHandler>* const* data() const {
359     // TODO:  Breaks C++ aliasing rules.  We should probably remove this
360     //   method entirely.
361     return reinterpret_cast<const Value<TypeHandler>* const*>(raw_data());
362   }
363 
364   template <typename TypeHandler>
365   PROTOBUF_NDEBUG_INLINE void Swap(RepeatedPtrFieldBase* other) {
366     if (internal::CanUseInternalSwap(GetArena(), other->GetArena())) {
367       InternalSwap(other);
368     } else {
369       SwapFallback<TypeHandler>(other);
370     }
371   }
372 
373   void SwapElements(int index1, int index2) {
374     using std::swap;  // enable ADL with fallback
375     swap(element_at(index1), element_at(index2));
376   }
377 
378   template <typename TypeHandler>
379   PROTOBUF_NOINLINE size_t SpaceUsedExcludingSelfLong() const {
380     size_t allocated_bytes =
381         using_sso()
382             ? 0
383             : static_cast<size_t>(Capacity()) * sizeof(void*) + kRepHeaderSize;
384     const int n = allocated_size();
385     void* const* elems = elements();
386     for (int i = 0; i < n; ++i) {
387       allocated_bytes +=
388           TypeHandler::SpaceUsedLong(*cast<TypeHandler>(elems[i]));
389     }
390     return allocated_bytes;
391   }
392 
393   // Advanced memory management --------------------------------------
394 
395   // Like Add(), but if there are no cleared objects to use, returns nullptr.
396   template <typename TypeHandler>
397   Value<TypeHandler>* AddFromCleared() {
398     if (current_size_ < allocated_size()) {
399       return cast<TypeHandler>(
400           element_at(ExchangeCurrentSize(current_size_ + 1)));
401     } else {
402       return nullptr;
403     }
404   }
405 
406   template <typename TypeHandler>
407   void AddAllocated(Value<TypeHandler>* value) {
408     ABSL_DCHECK_NE(value, nullptr);
409     Arena* element_arena = TypeHandler::GetArena(value);
410     Arena* arena = GetArena();
411     if (arena != element_arena || AllocatedSizeAtCapacity()) {
412       AddAllocatedSlowWithCopy<TypeHandler>(value, element_arena, arena);
413       return;
414     }
415     // Fast path: underlying arena representation (tagged pointer) is equal to
416     // our arena pointer, and we can add to array without resizing it (at
417     // least one slot that is not allocated).
418     void** elems = elements();
419     if (current_size_ < allocated_size()) {
420       // Make space at [current] by moving first allocated element to end of
421       // allocated list.
422       elems[allocated_size()] = elems[current_size_];
423     }
424     elems[ExchangeCurrentSize(current_size_ + 1)] = value;
425     if (!using_sso()) ++rep()->allocated_size;
426   }
427 
428   template <typename TypeHandler>
429   void UnsafeArenaAddAllocated(Value<TypeHandler>* value) {
430     ABSL_DCHECK_NE(value, nullptr);
431     // Make room for the new pointer.
432     if (SizeAtCapacity()) {
433       // The array is completely full with no cleared objects, so grow it.
434       InternalExtend(1);
435       ++rep()->allocated_size;
436     } else if (AllocatedSizeAtCapacity()) {
437       // There is no more space in the pointer array because it contains some
438       // cleared objects awaiting reuse.  We don't want to grow the array in
439       // this case because otherwise a loop calling AddAllocated() followed by
440       // Clear() would leak memory.
441       using H = CommonHandler<TypeHandler>;
442       Delete<H>(element_at(current_size_), arena_);
443     } else if (current_size_ < allocated_size()) {
444       // We have some cleared objects.  We don't care about their order, so we
445       // can just move the first one to the end to make space.
446       element_at(allocated_size()) = element_at(current_size_);
447       ++rep()->allocated_size;
448     } else {
449       // There are no cleared objects.
450       if (!using_sso()) ++rep()->allocated_size;
451     }
452 
453     element_at(ExchangeCurrentSize(current_size_ + 1)) = value;
454   }
455 
456   template <typename TypeHandler>
457   PROTOBUF_NODISCARD Value<TypeHandler>* ReleaseLast() {
458     Value<TypeHandler>* result = UnsafeArenaReleaseLast<TypeHandler>();
459     // Now perform a copy if we're on an arena.
460     Arena* arena = GetArena();
461 
462     if (internal::DebugHardenForceCopyInRelease()) {
463       auto* new_result = copy<TypeHandler>(result);
464       if (arena == nullptr) delete result;
465       return new_result;
466     } else {
467       return (arena == nullptr) ? result : copy<TypeHandler>(result);
468     }
469   }
470 
471   // Releases and returns the last element, but does not do out-of-arena copy.
472   // Instead, just returns the raw pointer to the contained element in the
473   // arena.
474   template <typename TypeHandler>
475   Value<TypeHandler>* UnsafeArenaReleaseLast() {
476     ABSL_DCHECK_GT(current_size_, 0);
477     ExchangeCurrentSize(current_size_ - 1);
478     auto* result = cast<TypeHandler>(element_at(current_size_));
479     if (using_sso()) {
480       tagged_rep_or_elem_ = nullptr;
481     } else {
482       --rep()->allocated_size;
483       if (current_size_ < allocated_size()) {
484         // There are cleared elements on the end; replace the removed element
485         // with the last allocated element.
486         element_at(current_size_) = element_at(allocated_size());
487       }
488     }
489     return result;
490   }
491 
492   int ClearedCount() const { return allocated_size() - current_size_; }
493 
494   // Slowpath handles all cases, copying if necessary.
495   template <typename TypeHandler>
496   PROTOBUF_NOINLINE void AddAllocatedSlowWithCopy(
497       // Pass value_arena and my_arena to avoid duplicate virtual call (value)
498       // or load (mine).
499       Value<TypeHandler>* value, Arena* value_arena, Arena* my_arena) {
500     using H = CommonHandler<TypeHandler>;
501     // Ensure that either the value is in the same arena, or if not, we do the
502     // appropriate thing: Own() it (if it's on heap and we're in an arena) or
503     // copy it to our arena/heap (otherwise).
504     if (my_arena != nullptr && value_arena == nullptr) {
505       my_arena->Own(value);
506     } else if (my_arena != value_arena) {
507       ABSL_DCHECK(value_arena != nullptr);
508       auto* new_value = TypeHandler::NewFromPrototype(value, my_arena);
509       H::Merge(*value, new_value);
510       value = new_value;
511     }
512 
513     UnsafeArenaAddAllocated<H>(value);
514   }
515 
516   template <typename TypeHandler>
517   PROTOBUF_NOINLINE void SwapFallback(RepeatedPtrFieldBase* other) {
518     ABSL_DCHECK(!internal::CanUseInternalSwap(GetArena(), other->GetArena()));
519 
520     // Copy semantics in this case. We try to improve efficiency by placing the
521     // temporary on |other|'s arena so that messages are copied twice rather
522     // than three times.
523     RepeatedPtrFieldBase temp(other->GetArena());
524     if (!this->empty()) {
525       temp.MergeFrom<typename TypeHandler::Type>(*this);
526     }
527     this->CopyFrom<TypeHandler>(*other);
528     other->InternalSwap(&temp);
529     if (temp.NeedsDestroy()) {
530       temp.Destroy<TypeHandler>();
531     }
532   }
533 
534   // Gets the Arena on which this RepeatedPtrField stores its elements.
535   inline Arena* GetArena() const { return arena_; }
536 
537   static constexpr size_t InternalGetArenaOffset(internal::InternalVisibility) {
538     return PROTOBUF_FIELD_OFFSET(RepeatedPtrFieldBase, arena_);
539   }
540 
541  private:
542   using InternalArenaConstructable_ = void;
543   using DestructorSkippable_ = void;
544 
545   template <typename T>
546   friend class Arena::InternalHelper;
547 
548   // ExtensionSet stores repeated message extensions as
549   // RepeatedPtrField<MessageLite>, but non-lite ExtensionSets need to implement
550   // SpaceUsedLong(), and thus need to call SpaceUsedExcludingSelfLong()
551   // reinterpreting MessageLite as Message.  ExtensionSet also needs to make use
552   // of AddFromCleared(), which is not part of the public interface.
553   friend class ExtensionSet;
554 
555   // The MapFieldBase implementation needs to call protected methods directly,
556   // reinterpreting pointers as being to Message instead of a specific Message
557   // subclass.
558   friend class MapFieldBase;
559   friend struct MapFieldTestPeer;
560 
561   // The table-driven MergePartialFromCodedStream implementation needs to
562   // operate on RepeatedPtrField<MessageLite>.
563   friend class MergePartialFromCodedStreamHelper;
564 
565   friend class AccessorHelper;
566 
567   template <typename T>
568   friend struct google::protobuf::WeakRepeatedPtrField;
569 
570   friend class internal::TcParser;  // TODO: Remove this friend.
571 
572   // Expose offset of `arena_` without exposing the member itself.
573   // Used to optimize code size of `InternalSwap` method.
574   template <typename T>
575   friend struct ArenaOffsetHelper;
576 
577   // The reflection implementation needs to call protected methods directly,
578   // reinterpreting pointers as being to Message instead of a specific Message
579   // subclass.
580   friend class google::protobuf::Reflection;
581   friend class internal::SwapFieldHelper;
582 
583   // Concrete Arena-enabled copy function used to copy message instances.
584   // This follows the `Arena::CopyConstruct` signature so that the inlined
585   // call into the out-of-line copy function(s) can simply pass the address
586   // of `Arena::CopyConstruct` as is.
587   using CopyFn = void* (*)(Arena*, const void*);
588 
589   struct Rep {
590     int allocated_size;
591     // Here we declare a huge array as a way of approximating C's "flexible
592     // array member" feature without relying on undefined behavior.
593     void* elements[(std::numeric_limits<int>::max() - 2 * sizeof(int)) /
594                    sizeof(void*)];
595   };
596 
597   static constexpr size_t kRepHeaderSize = offsetof(Rep, elements);
598 
599   // Replaces current_size_ with new_size and returns the previous value of
600   // current_size_. This function is intended to be the only place where
601   // current_size_ is modified.
602   inline int ExchangeCurrentSize(int new_size) {
603     return std::exchange(current_size_, new_size);
604   }
605   inline bool SizeAtCapacity() const {
606     // Harden invariant size() <= allocated_size() <= Capacity().
607     ABSL_DCHECK_LE(size(), allocated_size());
608     ABSL_DCHECK_LE(allocated_size(), Capacity());
609     // This is equivalent to `current_size_ == Capacity()`.
610     // Assuming `Capacity()` is inlined, the compiler is likely to optimize
611     // away "+ kSSOCapacity" and reduce this to "current_size_ > capacity_proxy_",
612     // which is one instruction less than "current_size_ == capacity_proxy_ + 1".
613     return current_size_ >= Capacity();
614   }
615   inline bool AllocatedSizeAtCapacity() const {
616     // Harden invariant size() <= allocated_size() <= Capacity().
617     ABSL_DCHECK_LE(size(), allocated_size());
618     ABSL_DCHECK_LE(allocated_size(), Capacity());
619     // This combines the optimization mentioned in `SizeAtCapacity()` and
620     // simplifies `allocated_size()` in the SSO case.
621     return using_sso() ? (tagged_rep_or_elem_ != nullptr)
622                        : rep()->allocated_size >= Capacity();
623   }
624 
625   void* const* elements() const {
626     return using_sso() ? &tagged_rep_or_elem_ : +rep()->elements;
627   }
628   void** elements() {
629     return using_sso() ? &tagged_rep_or_elem_ : +rep()->elements;
630   }
631 
632   void*& element_at(int index) {
633     if (using_sso()) {
634       ABSL_DCHECK_EQ(index, 0);
635       return tagged_rep_or_elem_;
636     }
637     return rep()->elements[index];
638   }
639   const void* element_at(int index) const {
640     return const_cast<RepeatedPtrFieldBase*>(this)->element_at(index);
641   }
642 
643   int allocated_size() const {
644     return using_sso() ? (tagged_rep_or_elem_ != nullptr ? 1 : 0)
645                        : rep()->allocated_size;
646   }
647   Rep* rep() {
648     ABSL_DCHECK(!using_sso());
649     return reinterpret_cast<Rep*>(
650         reinterpret_cast<uintptr_t>(tagged_rep_or_elem_) - 1);
651   }
652   const Rep* rep() const {
653     return const_cast<RepeatedPtrFieldBase*>(this)->rep();
654   }
655 
656   bool using_sso() const {
657     return (reinterpret_cast<uintptr_t>(tagged_rep_or_elem_) & 1) == 0;
658   }
659 
660   template <typename TypeHandler>
661   static inline Value<TypeHandler>* cast(void* element) {
662     return reinterpret_cast<Value<TypeHandler>*>(element);
663   }
664   template <typename TypeHandler>
665   static inline const Value<TypeHandler>* cast(const void* element) {
666     return reinterpret_cast<const Value<TypeHandler>*>(element);
667   }
668   template <typename TypeHandler>
669   static inline void Delete(void* obj, Arena* arena) {
670     TypeHandler::Delete(cast<TypeHandler>(obj), arena);
671   }
672 
673   // Out-of-line helper routine for Clear() once the inlined check has
674   // determined the container is non-empty.
675   template <typename TypeHandler>
676   PROTOBUF_NOINLINE void ClearNonEmpty() {
677     const int n = current_size_;
678     void* const* elems = elements();
679     int i = 0;
680     ABSL_DCHECK_GT(n, 0);
681     // do/while loop to avoid initial test because we know n > 0
682     do {
683       TypeHandler::Clear(cast<TypeHandler>(elems[i++]));
684     } while (i < n);
685     ExchangeCurrentSize(0);
686   }
687 
688   // Merges messages from `from` into available, cleared messages sitting in the
689   // range `[size(), allocated_size())`. Returns the number of messages merged,
690   // which is `std::min(ClearedCount(), from.size())`.
691   // Note that this function explicitly does NOT update `current_size_`.
692   // This function is out of line as it should be the slow path: this scenario
693   // only happens when a caller constructs and fills a repeated field, then
694   // shrinks it, and then merges additional messages into it.
695   int MergeIntoClearedMessages(const RepeatedPtrFieldBase& from);
696 
697   // Appends all messages from `from` to this instance, using the
698   // provided `copy_fn` copy function to copy existing messages.
699   void MergeFromConcreteMessage(const RepeatedPtrFieldBase& from,
700                                 CopyFn copy_fn);
701 
702   // Extends capacity by at least |extend_amount|. Returns a pointer to the
703   // next available element slot.
704   //
705   // Pre-condition: |extend_amount| must be > 0.
706   void** InternalExtend(int extend_amount);
707 
708   // Ensures that capacity is big enough to store one more allocated element.
709   inline void MaybeExtend() {
710     if (AllocatedSizeAtCapacity()) {
711       ABSL_DCHECK_EQ(allocated_size(), Capacity());
712       InternalExtend(1);
713     } else {
714       ABSL_DCHECK_NE(allocated_size(), Capacity());
715     }
716   }
717 
718   // Ensures that capacity is at least `n` elements.
719   // Returns a pointer to the element directly beyond the last element.
720   inline void** InternalReserve(int n) {
721     if (n <= Capacity()) {
722       void** elements = using_sso() ? &tagged_rep_or_elem_ : rep()->elements;
723       return elements + current_size_;
724     }
725     return InternalExtend(n - Capacity());
726   }
727 
728   // Internal helpers for Add that keep definition out-of-line.
729   void* AddMessageLite(ElementFactory factory);
730   void* AddString();
731 
732   // Common implementation used by various Add* methods. `factory` is an object
733   // used to construct a new element unless there are spare cleared elements
734   // ready for reuse. Returns pointer to the new element.
735   //
736   // Note: avoid inlining this function in methods such as `Add()` as this would
737   // drastically increase binary size due to template instantiation and implicit
738   // inlining.
739   template <typename Factory>
740   void* AddInternal(Factory factory);
741 
742   // A few notes on internal representation:
743   //
744   // We use an indirected approach, with struct Rep, to keep
745   // sizeof(RepeatedPtrFieldBase) equivalent to what it was before arena support
746   // was added; namely, 3 8-byte machine words on x86-64. An instance of Rep is
747   // allocated only when the repeated field is non-empty, and it is a
748   // dynamically-sized struct (the header is directly followed by elements[]).
749   // We place arena_ and current_size_ directly in the object to avoid cache
750   // misses due to the indirection, because these fields are checked frequently.
751   // Placing all fields directly in the RepeatedPtrFieldBase instance would cost
752   // significant performance for memory-sensitive workloads.
753   void* tagged_rep_or_elem_;
754   int current_size_;
755   int capacity_proxy_;  // we store `capacity - kSSOCapacity` as an optimization
756   Arena* arena_;
757 };
758 
759 // Appends all message values from `from` to this instance using the abstract
760 // message interface. This overload is used in places like reflection and
761 // other locations where the underlying type is unavailable.
762 template <>
763 void RepeatedPtrFieldBase::MergeFrom<MessageLite>(
764     const RepeatedPtrFieldBase& from);
765 
766 template <>
767 inline void RepeatedPtrFieldBase::MergeFrom<Message>(
768     const RepeatedPtrFieldBase& from) {
769   return MergeFrom<MessageLite>(from);
770 }
771 
772 // Appends all `std::string` values from `from` to this instance.
773 template <>
774 void RepeatedPtrFieldBase::MergeFrom<std::string>(
775     const RepeatedPtrFieldBase& from);
776 
777 
778 template <typename Factory>
779 void* RepeatedPtrFieldBase::AddInternal(Factory factory) {
780   Arena* const arena = GetArena();
781   if (tagged_rep_or_elem_ == nullptr) {
782     ExchangeCurrentSize(1);
783     tagged_rep_or_elem_ = factory(arena);
784     return tagged_rep_or_elem_;
785   }
786   absl::PrefetchToLocalCache(tagged_rep_or_elem_);
787   if (using_sso()) {
788     if (current_size_ == 0) {
789       ExchangeCurrentSize(1);
790       return tagged_rep_or_elem_;
791     }
792     void*& result = *InternalExtend(1);
793     result = factory(arena);
794     Rep* r = rep();
795     r->allocated_size = 2;
796     ExchangeCurrentSize(2);
797     return result;
798   }
799   Rep* r = rep();
800   if (PROTOBUF_PREDICT_FALSE(SizeAtCapacity())) {
801     InternalExtend(1);
802     r = rep();
803   } else {
804     if (current_size_ != r->allocated_size) {
805       return r->elements[ExchangeCurrentSize(current_size_ + 1)];
806     }
807   }
808   ++r->allocated_size;
809   void*& result = r->elements[ExchangeCurrentSize(current_size_ + 1)];
810   result = factory(arena);
811   return result;
812 }
813 
814 PROTOBUF_EXPORT void InternalOutOfLineDeleteMessageLite(MessageLite* message);
815 
816 template <typename GenericType>
817 class GenericTypeHandler {
818  public:
819   using Type = GenericType;
820   using Movable = IsMovable<Type>;
821 
822   static constexpr auto GetNewFunc() { return Arena::DefaultConstruct<Type>; }
823   static inline Arena* GetArena(Type* value) {
824     return Arena::InternalGetArena(value);
825   }
826 
827   static inline Type* New(Arena* arena) {
828     return static_cast<Type*>(Arena::DefaultConstruct<Type>(arena));
829   }
830   static inline Type* New(Arena* arena, Type&& value) {
831     return Arena::Create<Type>(arena, std::move(value));
832   }
833   static inline Type* NewFromPrototype(const Type* /*prototype*/,
834                                        Arena* arena = nullptr) {
835     return New(arena);
836   }
837   static inline void Delete(Type* value, Arena* arena) {
838     if (arena != nullptr) return;
839 #ifdef __cpp_if_constexpr
840     if constexpr (std::is_base_of<MessageLite, Type>::value) {
841       // Use the virtual destructor to reduce the generated code size that
842       // would otherwise result from inlining `~Type()`.
843       InternalOutOfLineDeleteMessageLite(value);
844     } else {
845       delete value;
846     }
847 #else
848     delete value;
849 #endif
850   }
851   static inline void Clear(Type* value) { value->Clear(); }
852   static void Merge(const Type& from, Type* to);
853   static inline size_t SpaceUsedLong(const Type& value) {
854     return value.SpaceUsedLong();
855   }
856 };
857 
858 // NewFromPrototypeHelper() is not defined inline here, as we will need to do a
859 // virtual function dispatch anyway to go from Message* to call New/Merge. (The
860 // additional helper is needed as a workaround for MSVC.)
861 PROTOBUF_EXPORT MessageLite* NewFromPrototypeHelper(
862     const MessageLite* prototype, Arena* arena);
863 
864 template <>
865 inline MessageLite* GenericTypeHandler<MessageLite>::NewFromPrototype(
866     const MessageLite* prototype, Arena* arena) {
867   return NewFromPrototypeHelper(prototype, arena);
868 }
869 template <>
870 inline Arena* GenericTypeHandler<MessageLite>::GetArena(MessageLite* value) {
871   return value->GetArena();
872 }
873 
874 template <typename GenericType>
875 PROTOBUF_NOINLINE inline void GenericTypeHandler<GenericType>::Merge(
876     const GenericType& from, GenericType* to) {
877   to->MergeFrom(from);
878 }
879 template <>
880 PROTOBUF_EXPORT void GenericTypeHandler<MessageLite>::Merge(
881     const MessageLite& from, MessageLite* to);
882 
883 // Message specialization bodies defined in message.cc. This split is necessary
884 // to allow proto2-lite (which includes this header) to be independent of
885 // Message.
886 template <>
887 PROTOBUF_EXPORT Message* GenericTypeHandler<Message>::NewFromPrototype(
888     const Message* prototype, Arena* arena);
889 template <>
890 PROTOBUF_EXPORT Arena* GenericTypeHandler<Message>::GetArena(Message* value);
891 
892 PROTOBUF_EXPORT void* NewStringElement(Arena* arena);
893 
894 template <>
895 class GenericTypeHandler<std::string> {
896  public:
897   using Type = std::string;
898   using Movable = IsMovable<Type>;
899 
900   static constexpr auto GetNewFunc() { return NewStringElement; }
901   static inline Arena* GetArena(Type*) { return nullptr; }
902 
903   static PROTOBUF_NOINLINE Type* New(Arena* arena) {
904     return Arena::Create<Type>(arena);
905   }
906   static PROTOBUF_NOINLINE Type* New(Arena* arena, Type&& value) {
907     return Arena::Create<Type>(arena, std::move(value));
908   }
909   static inline Type* NewFromPrototype(const Type*, Arena* arena) {
910     return New(arena);
911   }
912   static inline void Delete(Type* value, Arena* arena) {
913     if (arena == nullptr) {
914       delete value;
915     }
916   }
917   static inline void Clear(Type* value) { value->clear(); }
918   static inline void Merge(const Type& from, Type* to) { *to = from; }
919   static size_t SpaceUsedLong(const Type& value) {
920     return sizeof(value) + StringSpaceUsedExcludingSelfLong(value);
921   }
922 };
923 
924 }  // namespace internal
925 
926 // RepeatedPtrField is like RepeatedField, but used for repeated strings or
927 // Messages.
928 template <typename Element>
929 class RepeatedPtrField final : private internal::RepeatedPtrFieldBase {
930   static_assert(!std::is_const<Element>::value,
931                 "We do not support const value types.");
932   static_assert(!std::is_volatile<Element>::value,
933                 "We do not support volatile value types.");
934   static_assert(!std::is_pointer<Element>::value,
935                 "We do not support pointer value types.");
936   static_assert(!std::is_reference<Element>::value,
937                 "We do not support reference value types.");
938   static constexpr PROTOBUF_ALWAYS_INLINE void StaticValidityCheck() {
939     static_assert(
940         absl::disjunction<
941             internal::is_supported_string_type<Element>,
942             internal::is_supported_message_type<Element>>::value,
943         "We only support string and Message types in RepeatedPtrField.");
944   }
945 
946  public:
947   using value_type = Element;
948   using size_type = int;
949   using difference_type = ptrdiff_t;
950   using reference = Element&;
951   using const_reference = const Element&;
952   using pointer = Element*;
953   using const_pointer = const Element*;
954   using iterator = internal::RepeatedPtrIterator<Element>;
955   using const_iterator = internal::RepeatedPtrIterator<const Element>;
956   using reverse_iterator = std::reverse_iterator<iterator>;
957   using const_reverse_iterator = std::reverse_iterator<const_iterator>;
958   // Custom STL-like iterator that iterates over and returns the underlying
959   // pointers to Element rather than Element itself.
960   using pointer_iterator =
961       internal::RepeatedPtrOverPtrsIterator<Element*, void*>;
962   using const_pointer_iterator =
963       internal::RepeatedPtrOverPtrsIterator<const Element* const,
964                                             const void* const>;
965 
966   constexpr RepeatedPtrField();
967 
968   // Arena enabled constructors: for internal use only.
969   RepeatedPtrField(internal::InternalVisibility, Arena* arena)
970       : RepeatedPtrField(arena) {}
971   RepeatedPtrField(internal::InternalVisibility, Arena* arena,
972                    const RepeatedPtrField& rhs)
973       : RepeatedPtrField(arena, rhs) {}
974 
975   // TODO: make constructor private
976   explicit RepeatedPtrField(Arena* arena);
977 
978   template <typename Iter,
979             typename = typename std::enable_if<std::is_constructible<
980                 Element, decltype(*std::declval<Iter>())>::value>::type>
981   RepeatedPtrField(Iter begin, Iter end);
982 
983   RepeatedPtrField(const RepeatedPtrField& rhs)
984       : RepeatedPtrField(nullptr, rhs) {}
985   RepeatedPtrField& operator=(const RepeatedPtrField& other)
986       ABSL_ATTRIBUTE_LIFETIME_BOUND;
987 
988   RepeatedPtrField(RepeatedPtrField&& rhs) noexcept
989       : RepeatedPtrField(nullptr, std::move(rhs)) {}
990   RepeatedPtrField& operator=(RepeatedPtrField&& other) noexcept
991       ABSL_ATTRIBUTE_LIFETIME_BOUND;
992 
993   ~RepeatedPtrField();
994 
995   bool empty() const;
996   int size() const;
997 
998   const_reference Get(int index) const ABSL_ATTRIBUTE_LIFETIME_BOUND;
999   pointer Mutable(int index) ABSL_ATTRIBUTE_LIFETIME_BOUND;
1000 
1001   // Unlike std::vector, adding an element to a RepeatedPtrField doesn't always
1002   // make a new element; it might re-use an element left over from when the
1003   // field was Clear()'d or resize()'d smaller.  For this reason, Add() is the
1004   // fastest API for adding a new element.
1005   pointer Add() ABSL_ATTRIBUTE_LIFETIME_BOUND;
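  // A minimal usage sketch (editor's example; `MyMessage` and `set_id()` are
  // hypothetical generated names):
  //   RepeatedPtrField<MyMessage> field;
  //   MyMessage* msg = field.Add();  // returns a new or recycled element
  //   msg->set_id(1);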
1006 
1007   // `Add(std::move(value));` is equivalent to `*Add() = std::move(value);`
1008   // It will either move-construct to the end of this field, or swap value
1009   // with the new-or-recycled element at the end of this field.  Note that
1010   // this operation is very slow if this RepeatedPtrField is not on the
1011   // same Arena, if any, as `value`.
1012   void Add(Element&& value);
1013 
1014   // Copying to the end of this RepeatedPtrField is slowest of all; it can't
1015   // reliably copy-construct to the last element of this RepeatedPtrField, for
1016   // example (unlike std::vector).
1017   // We currently block this API.  The right way to add to the end is to call
1018   // Add() and modify the element it points to.
1019   // If you must add an existing value, call `*Add() = value;`
1020   void Add(const Element& value) = delete;
1021 
1022   // Append elements in the range [begin, end) after reserving
1023   // the appropriate number of elements.
1024   template <typename Iter>
1025   void Add(Iter begin, Iter end);
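  // For example (editor's sketch), appending from a standard container:
  //   std::vector<std::string> names = {"a", "b"};
  //   RepeatedPtrField<std::string> field;
  //   field.Add(names.begin(), names.end());  // reserves, then copies each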
1026 
1027   const_reference operator[](int index) const ABSL_ATTRIBUTE_LIFETIME_BOUND {
1028     return Get(index);
1029   }
1030   reference operator[](int index) ABSL_ATTRIBUTE_LIFETIME_BOUND {
1031     return *Mutable(index);
1032   }
1033 
1034   const_reference at(int index) const ABSL_ATTRIBUTE_LIFETIME_BOUND;
1035   reference at(int index) ABSL_ATTRIBUTE_LIFETIME_BOUND;
1036 
1037   // Removes the last element in the array.
1038   // Ownership of the element is retained by the array.
1039   void RemoveLast();
1040 
1041   // Deletes elements with indices in the range [start .. start+num-1].
1042   // Caution: moves all elements with indices [start+num .. ].
1043   // Calling this routine inside a loop can cause quadratic behavior.
1044   void DeleteSubrange(int start, int num);
1045 
1046   ABSL_ATTRIBUTE_REINITIALIZES void Clear();
1047 
1048   // Appends the elements from `other` after this instance.
1049   // The end result length will be `other.size() + this->size()`.
1050   void MergeFrom(const RepeatedPtrField& other);
1051 
1052   // Replaces the contents with a copy of the elements from `other`.
1053   ABSL_ATTRIBUTE_REINITIALIZES void CopyFrom(const RepeatedPtrField& other);
1054 
1055   // Replaces the contents with RepeatedPtrField(begin, end).
1056   template <typename Iter>
1057   ABSL_ATTRIBUTE_REINITIALIZES void Assign(Iter begin, Iter end);
1058 
1059   // Reserves space to expand the field to at least the given size.  This only
1060   // resizes the pointer array; it doesn't allocate any objects.  If the
1061   // array is grown, it will always be at least doubled in size.
1062   void Reserve(int new_size);
1063 
1064   int Capacity() const;
1065 
1066   // Gets the underlying array.  This pointer is possibly invalidated by
1067   // any add or remove operation.
1068   Element**
1069   mutable_data() ABSL_ATTRIBUTE_LIFETIME_BOUND;
1070   const Element* const* data() const ABSL_ATTRIBUTE_LIFETIME_BOUND;
1071 
1072   // Swaps entire contents with "other". If they are on separate arenas, then
1073   // copies data.
1074   void Swap(RepeatedPtrField* other);
1075 
1076   // Swaps entire contents with "other". Caller should guarantee that either
1077   // both fields are on the same arena or both are on the heap. Swapping between
1078   // different arenas with this function is disallowed and is caught via
1079   // ABSL_DCHECK.
1080   void UnsafeArenaSwap(RepeatedPtrField* other);
1081 
1082   // Swaps two elements.
1083   void SwapElements(int index1, int index2);
1084 
1085   iterator begin() ABSL_ATTRIBUTE_LIFETIME_BOUND;
1086   const_iterator begin() const ABSL_ATTRIBUTE_LIFETIME_BOUND;
1087   const_iterator cbegin() const ABSL_ATTRIBUTE_LIFETIME_BOUND;
1088   iterator end() ABSL_ATTRIBUTE_LIFETIME_BOUND;
1089   const_iterator end() const ABSL_ATTRIBUTE_LIFETIME_BOUND;
1090   const_iterator cend() const ABSL_ATTRIBUTE_LIFETIME_BOUND;
1091 
1092   reverse_iterator rbegin() ABSL_ATTRIBUTE_LIFETIME_BOUND {
1093     return reverse_iterator(end());
1094   }
1095   const_reverse_iterator rbegin() const ABSL_ATTRIBUTE_LIFETIME_BOUND {
1096     return const_reverse_iterator(end());
1097   }
1098   reverse_iterator rend() ABSL_ATTRIBUTE_LIFETIME_BOUND {
1099     return reverse_iterator(begin());
1100   }
1101   const_reverse_iterator rend() const ABSL_ATTRIBUTE_LIFETIME_BOUND {
1102     return const_reverse_iterator(begin());
1103   }
1104 
1105   pointer_iterator pointer_begin() ABSL_ATTRIBUTE_LIFETIME_BOUND;
1106   const_pointer_iterator pointer_begin() const ABSL_ATTRIBUTE_LIFETIME_BOUND;
1107   pointer_iterator pointer_end() ABSL_ATTRIBUTE_LIFETIME_BOUND;
1108   const_pointer_iterator pointer_end() const ABSL_ATTRIBUTE_LIFETIME_BOUND;
1109 
1110   // Returns (an estimate of) the number of bytes used by the repeated field,
1111   // excluding sizeof(*this).
1112   size_t SpaceUsedExcludingSelfLong() const;
1113 
1114   int SpaceUsedExcludingSelf() const {
1115     return internal::ToIntSize(SpaceUsedExcludingSelfLong());
1116   }
1117 
1118   // Advanced memory management --------------------------------------
1119   // When hardcore memory management becomes necessary -- as it sometimes
1120   // does here at Google -- the following methods may be useful.
1121 
1122   // Adds an already-allocated object, passing ownership to the
1123   // RepeatedPtrField.
1124   //
1125   // Note that some special behavior occurs with respect to arenas:
1126   //
1127   //   (i) if this field holds submessages, the new submessage will be copied if
1128   //   the original is in an arena and this RepeatedPtrField is either in a
1129   //   different arena, or on the heap.
1130   //   (ii) if this field holds strings, the passed-in string *must* be
1131   //   heap-allocated, not arena-allocated. There is no way to dynamically check
1132   //   this at runtime, so User Beware.
1133   // Requires:  value != nullptr
1134   void AddAllocated(Element* value);
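  // A short usage sketch (editor's example; `MyMessage` is hypothetical):
  //   RepeatedPtrField<MyMessage> field;   // heap-backed, no arena
  //   field.AddAllocated(new MyMessage);   // field now owns the pointer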
1135 
1136   // Removes and returns the last element, passing ownership to the caller.
1137   // Requires:  size() > 0
1138   //
1139   // If this RepeatedPtrField is on an arena, an object copy is required to pass
1140   // ownership back to the user (for compatible semantics). Use
1141   // UnsafeArenaReleaseLast() if this behavior is undesired.
1142   PROTOBUF_NODISCARD Element* ReleaseLast();
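  // Editor's sketch of the release pattern (`MyMessage` is hypothetical):
  //   std::unique_ptr<MyMessage> taken(field.ReleaseLast());
  //   // If `field` is on an arena, `taken` holds a heap-allocated copy.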
1143 
1144   // Adds an already-allocated object, skipping arena-ownership checks. The user
1145   // must guarantee that the given object is in the same arena as this
1146   // RepeatedPtrField.
1147   // It is also useful in legacy code that uses temporary ownership to avoid
1148   // copies. Example:
1149   //   RepeatedPtrField<T> temp_field;
1150   //   temp_field.UnsafeArenaAddAllocated(new T);
1151   //   ... // Do something with temp_field
1152   //   temp_field.UnsafeArenaExtractSubrange(0, temp_field.size(), nullptr);
1153   // If you put temp_field on the arena this fails, because the ownership
1154   // transfers to the arena at the "AddAllocated" call and is not released
1155   // anymore, causing a double delete. UnsafeArenaAddAllocated prevents this.
1156   // Requires:  value != nullptr
1157   void UnsafeArenaAddAllocated(Element* value);
1158 
1159   // Removes and returns the last element.  Unlike ReleaseLast, the returned
1160   // pointer is always to the original object.  This may be in an arena, in
1161   // which case it would have the arena's lifetime.
1162   // Requires: current_size_ > 0
1163   pointer UnsafeArenaReleaseLast();
1164 
1165   // Extracts elements with indices in the range "[start .. start+num-1]".
1166   // The caller assumes ownership of the extracted elements and is responsible
1167   // for deleting them when they are no longer needed.
1168   // If "elements" is non-nullptr, then pointers to the extracted elements
1169   // are stored in "elements[0 .. num-1]" for the convenience of the caller.
1170   // If "elements" is nullptr, then the caller must use some other mechanism
1171   // to perform any further operations (like deletion) on these elements.
1172   // Caution: implementation also moves elements with indices [start+num ..].
1173   // Calling this routine inside a loop can cause quadratic behavior.
1174   //
1175   // Memory copying behavior is identical to ReleaseLast(), described above: if
1176   // this RepeatedPtrField is on an arena, an object copy is performed for each
1177   // returned element, so that all returned element pointers are to
1178   // heap-allocated copies. If this copy is not desired, the user should call
1179   // UnsafeArenaExtractSubrange().
1180   void ExtractSubrange(int start, int num, Element** elements);
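  // Editor's sketch: extract two elements starting at index 3, assuming
  // field.size() >= 5, and take ownership (`MyMessage` is hypothetical):
  //   MyMessage* extracted[2];
  //   field.ExtractSubrange(3, 2, extracted);
  //   // The caller must eventually delete extracted[0] and extracted[1]
  //   // (heap-allocated copies if `field` is on an arena).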
1181 
1182   // Identical to ExtractSubrange() described above, except that no object
1183   // copies are ever performed. Instead, the raw object pointers are returned.
1184   // Thus, if on an arena, the returned objects must not be freed, because they
1185   // will not be heap-allocated objects.
1186   void UnsafeArenaExtractSubrange(int start, int num, Element** elements);
1187 
1188   // When elements are removed by calls to RemoveLast() or Clear(), they
1189   // are not actually freed.  Instead, they are cleared and kept so that
1190   // they can be reused later.  This can save lots of CPU time when
1191   // repeatedly reusing a protocol message for similar purposes.
1192   //
1193   // Hardcore programs may choose to manipulate these cleared objects
1194   // to better optimize memory management using the following routines.
1195 
1196   // Gets the number of cleared objects that are currently being kept
1197   // around for reuse.
1198   ABSL_DEPRECATED("This will be removed in a future release")
1199   int ClearedCount() const;
1200 
1201   // Removes the element referenced by position.
1202   //
1203   // Returns an iterator to the element immediately following the removed
1204   // element.
1205   //
1206   // Invalidates all iterators at or after the removed element, including end().
1207   iterator erase(const_iterator position) ABSL_ATTRIBUTE_LIFETIME_BOUND;
1208 
1209   // Removes the elements in the range [first, last).
1210   //
1211   // Returns an iterator to the element immediately following the removed range.
1212   //
1213   // Invalidates all iterators at or after the removed range, including end().
1214   iterator erase(const_iterator first,
1215                  const_iterator last) ABSL_ATTRIBUTE_LIFETIME_BOUND;
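  // Editor's sketch: erase the first two elements and continue iterating from
  // the returned iterator:
  //   auto it = field.erase(field.begin(), field.begin() + 2);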
1216 
1217   // Gets the arena on which this RepeatedPtrField stores its elements.
1218   inline Arena* GetArena();
1219 
1220   // For internal use only.
1221   //
1222   // This is public due to it being called by generated code.
1223   void InternalSwap(RepeatedPtrField* PROTOBUF_RESTRICT other) {
1224     internal::RepeatedPtrFieldBase::InternalSwap(other);
1225   }
1226 
1227   using RepeatedPtrFieldBase::InternalGetArenaOffset;
1228 
1229 
1230  private:
1231   using InternalArenaConstructable_ = void;
1232   using DestructorSkippable_ = void;
1233 
1234   friend class Arena;
1235 
1236   friend class internal::TcParser;
1237 
1238   template <typename T>
1239   friend struct WeakRepeatedPtrField;
1240 
1241   // Note:  RepeatedPtrField SHOULD NOT be subclassed by users.
1242   using TypeHandler = internal::GenericTypeHandler<Element>;
1243 
1244   RepeatedPtrField(Arena* arena, const RepeatedPtrField& rhs);
1245   RepeatedPtrField(Arena* arena, RepeatedPtrField&& rhs);
1246 
1247 
1248   void AddAllocatedForParse(Element* p) {
1249     return RepeatedPtrFieldBase::AddAllocatedForParse(p);
1250   }
1251 };
1252 
1253 // -------------------------------------------------------------------
1254 
1255 template <typename Element>
1256 constexpr RepeatedPtrField<Element>::RepeatedPtrField()
1257     : RepeatedPtrFieldBase() {
1258   StaticValidityCheck();
1259 }
1260 
1261 template <typename Element>
1262 inline RepeatedPtrField<Element>::RepeatedPtrField(Arena* arena)
1263     : RepeatedPtrFieldBase(arena) {
1264   // We can't have StaticValidityCheck here because that requires Element to be
1265   // a complete type, and in split repeated fields cases, we call
1266   // CreateMessage<RepeatedPtrField<T>> for incomplete Ts.
1267 }
1268 
1269 template <typename Element>
1270 inline RepeatedPtrField<Element>::RepeatedPtrField(Arena* arena,
1271                                                    const RepeatedPtrField& rhs)
1272     : RepeatedPtrFieldBase(arena) {
1273   StaticValidityCheck();
1274   MergeFrom(rhs);
1275 }
1276 
1277 template <typename Element>
1278 template <typename Iter, typename>
1279 inline RepeatedPtrField<Element>::RepeatedPtrField(Iter begin, Iter end) {
1280   StaticValidityCheck();
1281   Add(begin, end);
1282 }
1283 
1284 template <typename Element>
1285 RepeatedPtrField<Element>::~RepeatedPtrField() {
1286   StaticValidityCheck();
1287   if (!NeedsDestroy()) return;
1288 #ifdef __cpp_if_constexpr
1289   if constexpr (std::is_base_of<MessageLite, Element>::value) {
1290 #else
1291   if (std::is_base_of<MessageLite, Element>::value) {
1292 #endif
1293     DestroyProtos();
1294   } else {
1295     Destroy<TypeHandler>();
1296   }
1297 }
1298 
1299 template <typename Element>
1300 inline RepeatedPtrField<Element>& RepeatedPtrField<Element>::operator=(
1301     const RepeatedPtrField& other) ABSL_ATTRIBUTE_LIFETIME_BOUND {
1302   if (this != &other) CopyFrom(other);
1303   return *this;
1304 }
1305 
1306 template <typename Element>
1307 inline RepeatedPtrField<Element>::RepeatedPtrField(Arena* arena,
1308                                                    RepeatedPtrField&& rhs)
1309     : RepeatedPtrField(arena) {
1310   // We don't just call Swap(&rhs) here because it would perform 3 copies if rhs
1311   // is on a different arena.
1312   if (internal::CanMoveWithInternalSwap(arena, rhs.GetArena())) {
1313     InternalSwap(&rhs);
1314   } else {
1315     CopyFrom(rhs);
1316   }
1317 }
1318 
1319 template <typename Element>
1320 inline RepeatedPtrField<Element>& RepeatedPtrField<Element>::operator=(
1321     RepeatedPtrField&& other) noexcept ABSL_ATTRIBUTE_LIFETIME_BOUND {
1322   // We don't just call Swap(&other) here because it would perform 3 copies if
1323   // the two fields are on different arenas.
1324   if (this != &other) {
1325     if (internal::CanMoveWithInternalSwap(GetArena(), other.GetArena())) {
1326       InternalSwap(&other);
1327     } else {
1328       CopyFrom(other);
1329     }
1330   }
1331   return *this;
1332 }
1333 
1334 template <typename Element>
1335 inline bool RepeatedPtrField<Element>::empty() const {
1336   return RepeatedPtrFieldBase::empty();
1337 }
1338 
1339 template <typename Element>
1340 inline int RepeatedPtrField<Element>::size() const {
1341   return RepeatedPtrFieldBase::size();
1342 }
1343 
1344 template <typename Element>
1345 inline const Element& RepeatedPtrField<Element>::Get(int index) const
1346     ABSL_ATTRIBUTE_LIFETIME_BOUND {
1347   return RepeatedPtrFieldBase::Get<TypeHandler>(index);
1348 }
1349 
1350 template <typename Element>
1351 inline const Element& RepeatedPtrField<Element>::at(int index) const
1352     ABSL_ATTRIBUTE_LIFETIME_BOUND {
1353   return RepeatedPtrFieldBase::at<TypeHandler>(index);
1354 }
1355 
1356 template <typename Element>
1357 inline Element& RepeatedPtrField<Element>::at(int index)
1358     ABSL_ATTRIBUTE_LIFETIME_BOUND {
1359   return RepeatedPtrFieldBase::at<TypeHandler>(index);
1360 }
1361 
1362 
1363 template <typename Element>
1364 inline Element* RepeatedPtrField<Element>::Mutable(int index)
1365     ABSL_ATTRIBUTE_LIFETIME_BOUND {
1366   return RepeatedPtrFieldBase::Mutable<TypeHandler>(index);
1367 }
1368 
1369 template <typename Element>
1370 inline Element* RepeatedPtrField<Element>::Add() ABSL_ATTRIBUTE_LIFETIME_BOUND {
1371   return RepeatedPtrFieldBase::Add<TypeHandler>();
1372 }
1373 
1374 template <typename Element>
1375 inline void RepeatedPtrField<Element>::Add(Element&& value) {
1376   RepeatedPtrFieldBase::Add<TypeHandler>(std::move(value));
1377 }
1378 
1379 template <typename Element>
1380 template <typename Iter>
1381 inline void RepeatedPtrField<Element>::Add(Iter begin, Iter end) {
1382   if (std::is_base_of<
1383           std::forward_iterator_tag,
1384           typename std::iterator_traits<Iter>::iterator_category>::value) {
1385     int reserve = static_cast<int>(std::distance(begin, end));
1386     Reserve(size() + reserve);
1387   }
1388   for (; begin != end; ++begin) {
1389     *Add() = *begin;
1390   }
1391 }
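// Example (illustrative sketch, not part of the generated API): appending a
// whole container in one call. With forward iterators, the Reserve() above
// runs once, so at most one growth happens before the copies. The names used
// here are hypothetical.
//
//   std::vector<std::string> names = {"a", "b", "c"};
//   RepeatedPtrField<std::string> field;
//   field.Add(names.begin(), names.end());  // reserves room for 3, then copies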
1392 
1393 template <typename Element>
1394 inline void RepeatedPtrField<Element>::RemoveLast() {
1395   RepeatedPtrFieldBase::RemoveLast<TypeHandler>();
1396 }
1397 
1398 template <typename Element>
1399 inline void RepeatedPtrField<Element>::DeleteSubrange(int start, int num) {
1400   ABSL_DCHECK_GE(start, 0);
1401   ABSL_DCHECK_GE(num, 0);
1402   ABSL_DCHECK_LE(start + num, size());
1403   void** subrange = raw_mutable_data() + start;
1404   Arena* arena = GetArena();
1405   for (int i = 0; i < num; ++i) {
1406     using H = CommonHandler<TypeHandler>;
1407     H::Delete(static_cast<Element*>(subrange[i]), arena);
1408   }
1409   UnsafeArenaExtractSubrange(start, num, nullptr);
1410 }
1411 
1412 template <typename Element>
1413 inline void RepeatedPtrField<Element>::ExtractSubrange(int start, int num,
1414                                                        Element** elements) {
1415   ABSL_DCHECK_GE(start, 0);
1416   ABSL_DCHECK_GE(num, 0);
1417   ABSL_DCHECK_LE(start + num, size());
1418 
1419   if (num == 0) return;
1420 
1421   ABSL_DCHECK_NE(elements, nullptr)
1422       << "Releasing elements without transferring ownership is an unsafe "
1423          "operation.  Use UnsafeArenaExtractSubrange.";
1424   if (elements != nullptr) {
1425     Arena* arena = GetArena();
1426     auto* extracted = data() + start;
1427     if (internal::DebugHardenForceCopyInRelease()) {
1428       // Always copy.
1429       for (int i = 0; i < num; ++i) {
1430         elements[i] = copy<TypeHandler>(extracted[i]);
1431       }
1432       if (arena == nullptr) {
1433         for (int i = 0; i < num; ++i) {
1434           delete extracted[i];
1435         }
1436       }
1437     } else {
1438       // If we're on an arena, we perform a copy for each element so that the
1439       // returned elements are heap-allocated. Otherwise, just forward it.
1440       if (arena != nullptr) {
1441         for (int i = 0; i < num; ++i) {
1442           elements[i] = copy<TypeHandler>(extracted[i]);
1443         }
1444       } else {
1445         memcpy(elements, extracted, num * sizeof(Element*));
1446       }
1447     }
1448   }
1449   CloseGap(start, num);
1450 }
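// Example (illustrative sketch): extracting elements and taking ownership.
// Whether or not the field lives on an arena, the pointers written into `out`
// are heap-allocated and owned by the caller afterwards (arena-backed fields
// hand out fresh copies, as implemented above).
//
//   RepeatedPtrField<std::string> field;
//   *field.Add() = "x";
//   *field.Add() = "y";
//   std::string* out[2];
//   field.ExtractSubrange(0, 2, out);  // field is now empty
//   delete out[0];
//   delete out[1];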
1451 
1452 template <typename Element>
1453 inline void RepeatedPtrField<Element>::UnsafeArenaExtractSubrange(
1454     int start, int num, Element** elements) {
1455   ABSL_DCHECK_GE(start, 0);
1456   ABSL_DCHECK_GE(num, 0);
1457   ABSL_DCHECK_LE(start + num, size());
1458 
1459   if (num > 0) {
1460     // Save the values of the removed elements if requested.
1461     if (elements != nullptr) {
1462       memcpy(elements, data() + start, num * sizeof(Element*));
1463     }
1464     CloseGap(start, num);
1465   }
1466 }
1467 
1468 template <typename Element>
1469 inline void RepeatedPtrField<Element>::Clear() {
1470   RepeatedPtrFieldBase::Clear<TypeHandler>();
1471 }
1472 
1473 template <typename Element>
1474 inline void RepeatedPtrField<Element>::MergeFrom(
1475     const RepeatedPtrField& other) {
1476   if (other.empty()) return;
1477   RepeatedPtrFieldBase::MergeFrom<Element>(other);
1478 }
1479 
1480 template <typename Element>
1481 inline void RepeatedPtrField<Element>::CopyFrom(const RepeatedPtrField& other) {
1482   RepeatedPtrFieldBase::CopyFrom<TypeHandler>(other);
1483 }
1484 
1485 template <typename Element>
1486 template <typename Iter>
1487 inline void RepeatedPtrField<Element>::Assign(Iter begin, Iter end) {
1488   Clear();
1489   Add(begin, end);
1490 }
1491 
1492 template <typename Element>
1493 inline typename RepeatedPtrField<Element>::iterator
1494 RepeatedPtrField<Element>::erase(const_iterator position)
1495     ABSL_ATTRIBUTE_LIFETIME_BOUND {
1496   return erase(position, position + 1);
1497 }
1498 
1499 template <typename Element>
1500 inline typename RepeatedPtrField<Element>::iterator
1501 RepeatedPtrField<Element>::erase(const_iterator first, const_iterator last)
1502     ABSL_ATTRIBUTE_LIFETIME_BOUND {
1503   size_type pos_offset = static_cast<size_type>(std::distance(cbegin(), first));
1504   size_type last_offset = static_cast<size_type>(std::distance(cbegin(), last));
1505   DeleteSubrange(pos_offset, last_offset - pos_offset);
1506   return begin() + pos_offset;
1507 }
1508 
1509 template <typename Element>
1510 inline Element** RepeatedPtrField<Element>::mutable_data()
1511     ABSL_ATTRIBUTE_LIFETIME_BOUND {
1512   return RepeatedPtrFieldBase::mutable_data<TypeHandler>();
1513 }
1514 
1515 template <typename Element>
1516 inline const Element* const* RepeatedPtrField<Element>::data() const
1517     ABSL_ATTRIBUTE_LIFETIME_BOUND {
1518   return RepeatedPtrFieldBase::data<TypeHandler>();
1519 }
1520 
1521 template <typename Element>
1522 inline void RepeatedPtrField<Element>::Swap(RepeatedPtrField* other) {
1523   if (this == other) return;
1524   RepeatedPtrFieldBase::Swap<TypeHandler>(other);
1525 }
1526 
1527 template <typename Element>
1528 inline void RepeatedPtrField<Element>::UnsafeArenaSwap(
1529     RepeatedPtrField* other) {
1530   if (this == other) return;
1531   ABSL_DCHECK_EQ(GetArena(), other->GetArena());
1532   RepeatedPtrFieldBase::InternalSwap(other);
1533 }
1534 
1535 template <typename Element>
1536 inline void RepeatedPtrField<Element>::SwapElements(int index1, int index2) {
1537   RepeatedPtrFieldBase::SwapElements(index1, index2);
1538 }
1539 
1540 template <typename Element>
1541 inline Arena* RepeatedPtrField<Element>::GetArena() {
1542   return RepeatedPtrFieldBase::GetArena();
1543 }
1544 
1545 template <typename Element>
1546 inline size_t RepeatedPtrField<Element>::SpaceUsedExcludingSelfLong() const {
1547   // `google::protobuf::Message` has a virtual method `SpaceUsedLong`, hence we can
1548   // instantiate just one function for all protobuf messages.
1549   // Note: std::is_base_of requires that `Element` is a concrete class.
1550   using H = typename std::conditional<std::is_base_of<Message, Element>::value,
1551                                       internal::GenericTypeHandler<Message>,
1552                                       TypeHandler>::type;
1553   return RepeatedPtrFieldBase::SpaceUsedExcludingSelfLong<H>();
1554 }
1555 
1556 template <typename Element>
1557 inline void RepeatedPtrField<Element>::AddAllocated(Element* value) {
1558   RepeatedPtrFieldBase::AddAllocated<TypeHandler>(value);
1559 }
1560 
1561 template <typename Element>
1562 inline void RepeatedPtrField<Element>::UnsafeArenaAddAllocated(Element* value) {
1563   RepeatedPtrFieldBase::UnsafeArenaAddAllocated<TypeHandler>(value);
1564 }
1565 
1566 template <typename Element>
1567 inline Element* RepeatedPtrField<Element>::ReleaseLast() {
1568   return RepeatedPtrFieldBase::ReleaseLast<TypeHandler>();
1569 }
1570 
1571 template <typename Element>
1572 inline Element* RepeatedPtrField<Element>::UnsafeArenaReleaseLast() {
1573   return RepeatedPtrFieldBase::UnsafeArenaReleaseLast<TypeHandler>();
1574 }
1575 
1576 template <typename Element>
1577 inline int RepeatedPtrField<Element>::ClearedCount() const {
1578   return RepeatedPtrFieldBase::ClearedCount();
1579 }
1580 
1581 template <typename Element>
1582 inline void RepeatedPtrField<Element>::Reserve(int new_size) {
1583   return RepeatedPtrFieldBase::Reserve(new_size);
1584 }
1585 
1586 template <typename Element>
1587 inline int RepeatedPtrField<Element>::Capacity() const {
1588   return RepeatedPtrFieldBase::Capacity();
1589 }
1590 
1591 // -------------------------------------------------------------------
1592 
1593 namespace internal {
1594 
1595 // STL-like iterator implementation for RepeatedPtrField.  You should not
1596 // refer to this class directly; use RepeatedPtrField<T>::iterator instead.
1597 //
1598 // The iterator for RepeatedPtrField<T>, RepeatedPtrIterator<T>, is
1599 // very similar to iterator_ptr<T**> in util/gtl/iterator_adaptors.h,
1600 // but adds random-access operators and is modified to wrap a void** base
1601 // iterator (since RepeatedPtrField stores its array as a void* array and
1602 // casting void** to T** would violate C++ aliasing rules).
1603 //
1604 // This code is based on net/proto/proto-array-internal.h by Jeffrey Yasskin
1605 // (jyasskin@google.com).
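// Example (illustrative sketch): this iterator is what RepeatedPtrField's
// begin()/end() return, so it is normally used indirectly, e.g. via
// range-based for or <algorithm>:
//
//   RepeatedPtrField<std::string> field;
//   *field.Add() = "b";
//   *field.Add() = "a";
//   for (const std::string& s : field) { /* visits "b", then "a" */ }
//   auto it = std::find(field.begin(), field.end(), "a");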
1606 template <typename Element>
1607 class RepeatedPtrIterator {
1608  public:
1609   using iterator = RepeatedPtrIterator<Element>;
1610   using iterator_category = std::random_access_iterator_tag;
1611   using value_type = typename std::remove_const<Element>::type;
1612   using difference_type = std::ptrdiff_t;
1613   using pointer = Element*;
1614   using reference = Element&;
1615 
1616   RepeatedPtrIterator() : it_(nullptr) {}
1617   explicit RepeatedPtrIterator(void* const* it) : it_(it) {}
1618 
1619   // Allows "upcasting" from RepeatedPtrIterator<T**> to
1620   // RepeatedPtrIterator<const T*const*>.
1621   template <typename OtherElement,
1622             typename std::enable_if<std::is_convertible<
1623                 OtherElement*, pointer>::value>::type* = nullptr>
1624   RepeatedPtrIterator(const RepeatedPtrIterator<OtherElement>& other)
1625       : it_(other.it_) {}
1626 
1627   // dereferenceable
1628   reference operator*() const { return *reinterpret_cast<Element*>(*it_); }
1629   pointer operator->() const { return &(operator*()); }
1630 
1631   // {inc,dec}rementable
1632   iterator& operator++() {
1633     ++it_;
1634     return *this;
1635   }
1636   iterator operator++(int) { return iterator(it_++); }
1637   iterator& operator--() {
1638     --it_;
1639     return *this;
1640   }
1641   iterator operator--(int) { return iterator(it_--); }
1642 
1643   // equality_comparable
1644   friend bool operator==(const iterator& x, const iterator& y) {
1645     return x.it_ == y.it_;
1646   }
1647   friend bool operator!=(const iterator& x, const iterator& y) {
1648     return x.it_ != y.it_;
1649   }
1650 
1651   // less_than_comparable
1652   friend bool operator<(const iterator& x, const iterator& y) {
1653     return x.it_ < y.it_;
1654   }
1655   friend bool operator<=(const iterator& x, const iterator& y) {
1656     return x.it_ <= y.it_;
1657   }
1658   friend bool operator>(const iterator& x, const iterator& y) {
1659     return x.it_ > y.it_;
1660   }
1661   friend bool operator>=(const iterator& x, const iterator& y) {
1662     return x.it_ >= y.it_;
1663   }
1664 
1665   // addable, subtractable
1666   iterator& operator+=(difference_type d) {
1667     it_ += d;
1668     return *this;
1669   }
1670   friend iterator operator+(iterator it, const difference_type d) {
1671     it += d;
1672     return it;
1673   }
1674   friend iterator operator+(const difference_type d, iterator it) {
1675     it += d;
1676     return it;
1677   }
1678   iterator& operator-=(difference_type d) {
1679     it_ -= d;
1680     return *this;
1681   }
1682   friend iterator operator-(iterator it, difference_type d) {
1683     it -= d;
1684     return it;
1685   }
1686 
1687   // indexable
1688   reference operator[](difference_type d) const { return *(*this + d); }
1689 
1690   // random access iterator
1691   friend difference_type operator-(iterator it1, iterator it2) {
1692     return it1.it_ - it2.it_;
1693   }
1694 
1695  private:
1696   template <typename OtherElement>
1697   friend class RepeatedPtrIterator;
1698 
1699   // The internal iterator.
1700   void* const* it_;
1701 };
1702 
1703 template <typename Traits, typename = void>
1704 struct IteratorConceptSupport {
1705   using tag = typename Traits::iterator_category;
1706 };
1707 
1708 template <typename Traits>
1709 struct IteratorConceptSupport<Traits,
1710                               absl::void_t<typename Traits::iterator_concept>> {
1711   using tag = typename Traits::iterator_concept;
1712 };
1713 
1714 // Provides an iterator that operates on pointers to the underlying objects
1715 // rather than the objects themselves as RepeatedPtrIterator does.
1716 // Consider using this when working with STL algorithms that reorder or
1717 // replace the pointers in the array.
1718 // The VoidPtr template parameter holds the type-agnostic pointer value
1719 // referenced by the iterator.  It should either be "void *" for a mutable
1720 // iterator, or "const void* const" for a constant iterator.
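// Example (illustrative sketch): sorting a RepeatedPtrField by reordering the
// stored pointers rather than the elements themselves, via
// RepeatedPtrField<T>::pointer_begin()/pointer_end():
//
//   RepeatedPtrField<std::string> field;
//   *field.Add() = "b";
//   *field.Add() = "a";
//   std::sort(field.pointer_begin(), field.pointer_end(),
//             [](const std::string* x, const std::string* y) {
//               return *x < *y;
//             });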
1721 template <typename Element, typename VoidPtr>
1722 class RepeatedPtrOverPtrsIterator {
1723  private:
1724   using traits =
1725       std::iterator_traits<typename std::remove_const<Element>::type*>;
1726 
1727  public:
1728   using value_type = typename traits::value_type;
1729   using difference_type = typename traits::difference_type;
1730   using pointer = Element*;
1731   using reference = Element&;
1732   using iterator_category = typename traits::iterator_category;
1733   using iterator_concept = typename IteratorConceptSupport<traits>::tag;
1734 
1735   using iterator = RepeatedPtrOverPtrsIterator<Element, VoidPtr>;
1736 
1737   RepeatedPtrOverPtrsIterator() : it_(nullptr) {}
1738   explicit RepeatedPtrOverPtrsIterator(VoidPtr* it) : it_(it) {}
1739 
1740   // Allows "upcasting" from RepeatedPtrOverPtrsIterator<T**> to
1741   // RepeatedPtrOverPtrsIterator<const T*const*>.
1742   template <
1743       typename OtherElement, typename OtherVoidPtr,
1744       typename std::enable_if<
1745           std::is_convertible<OtherElement*, pointer>::value &&
1746           std::is_convertible<OtherVoidPtr*, VoidPtr>::value>::type* = nullptr>
1747   RepeatedPtrOverPtrsIterator(
1748       const RepeatedPtrOverPtrsIterator<OtherElement, OtherVoidPtr>& other)
1749       : it_(other.it_) {}
1750 
1751   // dereferenceable
1752   reference operator*() const { return *reinterpret_cast<Element*>(it_); }
1753   pointer operator->() const { return reinterpret_cast<Element*>(it_); }
1754 
1755   // {inc,dec}rementable
1756   iterator& operator++() {
1757     ++it_;
1758     return *this;
1759   }
1760   iterator operator++(int) { return iterator(it_++); }
1761   iterator& operator--() {
1762     --it_;
1763     return *this;
1764   }
1765   iterator operator--(int) { return iterator(it_--); }
1766 
1767   // equality_comparable
1768   friend bool operator==(const iterator& x, const iterator& y) {
1769     return x.it_ == y.it_;
1770   }
1771   friend bool operator!=(const iterator& x, const iterator& y) {
1772     return x.it_ != y.it_;
1773   }
1774 
1775   // less_than_comparable
1776   friend bool operator<(const iterator& x, const iterator& y) {
1777     return x.it_ < y.it_;
1778   }
1779   friend bool operator<=(const iterator& x, const iterator& y) {
1780     return x.it_ <= y.it_;
1781   }
1782   friend bool operator>(const iterator& x, const iterator& y) {
1783     return x.it_ > y.it_;
1784   }
1785   friend bool operator>=(const iterator& x, const iterator& y) {
1786     return x.it_ >= y.it_;
1787   }
1788 
1789   // addable, subtractable
1790   iterator& operator+=(difference_type d) {
1791     it_ += d;
1792     return *this;
1793   }
1794   friend iterator operator+(iterator it, difference_type d) {
1795     it += d;
1796     return it;
1797   }
1798   friend iterator operator+(difference_type d, iterator it) {
1799     it += d;
1800     return it;
1801   }
1802   iterator& operator-=(difference_type d) {
1803     it_ -= d;
1804     return *this;
1805   }
1806   friend iterator operator-(iterator it, difference_type d) {
1807     it -= d;
1808     return it;
1809   }
1810 
1811   // indexable
1812   reference operator[](difference_type d) const { return *(*this + d); }
1813 
1814   // random access iterator
1815   friend difference_type operator-(iterator it1, iterator it2) {
1816     return it1.it_ - it2.it_;
1817   }
1818 
1819  private:
1820   template <typename OtherElement, typename OtherVoidPtr>
1821   friend class RepeatedPtrOverPtrsIterator;
1822 
1823   // The internal iterator.
1824   VoidPtr* it_;
1825 };
1826 
1827 }  // namespace internal
1828 
1829 template <typename Element>
1830 inline typename RepeatedPtrField<Element>::iterator
1831 RepeatedPtrField<Element>::begin() ABSL_ATTRIBUTE_LIFETIME_BOUND {
1832   return iterator(raw_data());
1833 }
1834 template <typename Element>
1835 inline typename RepeatedPtrField<Element>::const_iterator
1836 RepeatedPtrField<Element>::begin() const ABSL_ATTRIBUTE_LIFETIME_BOUND {
1837   return iterator(raw_data());
1838 }
1839 template <typename Element>
1840 inline typename RepeatedPtrField<Element>::const_iterator
1841 RepeatedPtrField<Element>::cbegin() const ABSL_ATTRIBUTE_LIFETIME_BOUND {
1842   return begin();
1843 }
1844 template <typename Element>
1845 inline typename RepeatedPtrField<Element>::iterator
1846 RepeatedPtrField<Element>::end() ABSL_ATTRIBUTE_LIFETIME_BOUND {
1847   return iterator(raw_data() + size());
1848 }
1849 template <typename Element>
1850 inline typename RepeatedPtrField<Element>::const_iterator
1851 RepeatedPtrField<Element>::end() const ABSL_ATTRIBUTE_LIFETIME_BOUND {
1852   return iterator(raw_data() + size());
1853 }
1854 template <typename Element>
1855 inline typename RepeatedPtrField<Element>::const_iterator
1856 RepeatedPtrField<Element>::cend() const ABSL_ATTRIBUTE_LIFETIME_BOUND {
1857   return end();
1858 }
1859 
1860 template <typename Element>
1861 inline typename RepeatedPtrField<Element>::pointer_iterator
1862 RepeatedPtrField<Element>::pointer_begin() ABSL_ATTRIBUTE_LIFETIME_BOUND {
1863   return pointer_iterator(raw_mutable_data());
1864 }
1865 template <typename Element>
1866 inline typename RepeatedPtrField<Element>::const_pointer_iterator
1867 RepeatedPtrField<Element>::pointer_begin() const ABSL_ATTRIBUTE_LIFETIME_BOUND {
1868   return const_pointer_iterator(const_cast<const void* const*>(raw_data()));
1869 }
1870 template <typename Element>
1871 inline typename RepeatedPtrField<Element>::pointer_iterator
1872 RepeatedPtrField<Element>::pointer_end() ABSL_ATTRIBUTE_LIFETIME_BOUND {
1873   return pointer_iterator(raw_mutable_data() + size());
1874 }
1875 template <typename Element>
1876 inline typename RepeatedPtrField<Element>::const_pointer_iterator
1877 RepeatedPtrField<Element>::pointer_end() const ABSL_ATTRIBUTE_LIFETIME_BOUND {
1878   return const_pointer_iterator(
1879       const_cast<const void* const*>(raw_data() + size()));
1880 }
1881 
1882 // Iterators and helper functions that follow the spirit of the STL
1883 // std::back_insert_iterator and std::back_inserter but are tailor-made
1884 // for RepeatedField and RepeatedPtrField. Typical usage would be:
1885 //
1886 //   std::copy(some_sequence.begin(), some_sequence.end(),
1887 //             RepeatedFieldBackInserter(proto.mutable_sequence()));
1888 //
1889 // Ported by johannes from util/gtl/proto-array-iterators.h
1890 
1891 namespace internal {
1892 
1893 // A back inserter for RepeatedPtrField objects.
1894 template <typename T>
1895 class RepeatedPtrFieldBackInsertIterator {
1896  public:
1897   using iterator_category = std::output_iterator_tag;
1898   using value_type = T;
1899   using pointer = void;
1900   using reference = void;
1901   using difference_type = std::ptrdiff_t;
1902 
1903   RepeatedPtrFieldBackInsertIterator(RepeatedPtrField<T>* const mutable_field)
1904       : field_(mutable_field) {}
1905   RepeatedPtrFieldBackInsertIterator<T>& operator=(const T& value) {
1906     *field_->Add() = value;
1907     return *this;
1908   }
1909   RepeatedPtrFieldBackInsertIterator<T>& operator=(
1910       const T* const ptr_to_value) {
1911     *field_->Add() = *ptr_to_value;
1912     return *this;
1913   }
1914   RepeatedPtrFieldBackInsertIterator<T>& operator=(T&& value) {
1915     *field_->Add() = std::move(value);
1916     return *this;
1917   }
1918   RepeatedPtrFieldBackInsertIterator<T>& operator*() { return *this; }
1919   RepeatedPtrFieldBackInsertIterator<T>& operator++() { return *this; }
1920   RepeatedPtrFieldBackInsertIterator<T>& operator++(int /* unused */) {
1921     return *this;
1922   }
1923 
1924  private:
1925   RepeatedPtrField<T>* field_;
1926 };
1927 
1928 // A back inserter for RepeatedPtrFields that inserts by transferring ownership
1929 // of a pointer.
1930 template <typename T>
1931 class AllocatedRepeatedPtrFieldBackInsertIterator {
1932  public:
1933   using iterator_category = std::output_iterator_tag;
1934   using value_type = T;
1935   using pointer = void;
1936   using reference = void;
1937   using difference_type = std::ptrdiff_t;
1938 
1939   explicit AllocatedRepeatedPtrFieldBackInsertIterator(
1940       RepeatedPtrField<T>* const mutable_field)
1941       : field_(mutable_field) {}
1942   AllocatedRepeatedPtrFieldBackInsertIterator<T>& operator=(
1943       T* const ptr_to_value) {
1944     field_->AddAllocated(ptr_to_value);
1945     return *this;
1946   }
1947   AllocatedRepeatedPtrFieldBackInsertIterator<T>& operator*() { return *this; }
1948   AllocatedRepeatedPtrFieldBackInsertIterator<T>& operator++() { return *this; }
1949   AllocatedRepeatedPtrFieldBackInsertIterator<T>& operator++(int /* unused */) {
1950     return *this;
1951   }
1952 
1953  private:
1954   RepeatedPtrField<T>* field_;
1955 };
1956 
1957 // Almost identical to AllocatedRepeatedPtrFieldBackInsertIterator. This one
1958 // uses UnsafeArenaAddAllocated instead of AddAllocated.
1959 template <typename T>
1960 class UnsafeArenaAllocatedRepeatedPtrFieldBackInsertIterator {
1961  public:
1962   using iterator_category = std::output_iterator_tag;
1963   using value_type = T;
1964   using pointer = void;
1965   using reference = void;
1966   using difference_type = std::ptrdiff_t;
1967 
1968   explicit UnsafeArenaAllocatedRepeatedPtrFieldBackInsertIterator(
1969       RepeatedPtrField<T>* const mutable_field)
1970       : field_(mutable_field) {}
1971   UnsafeArenaAllocatedRepeatedPtrFieldBackInsertIterator<T>& operator=(
1972       T const* const ptr_to_value) {
1973     field_->UnsafeArenaAddAllocated(const_cast<T*>(ptr_to_value));
1974     return *this;
1975   }
1976   UnsafeArenaAllocatedRepeatedPtrFieldBackInsertIterator<T>& operator*() {
1977     return *this;
1978   }
1979   UnsafeArenaAllocatedRepeatedPtrFieldBackInsertIterator<T>& operator++() {
1980     return *this;
1981   }
1982   UnsafeArenaAllocatedRepeatedPtrFieldBackInsertIterator<T>& operator++(
1983       int /* unused */) {
1984     return *this;
1985   }
1986 
1987  private:
1988   RepeatedPtrField<T>* field_;
1989 };
1990 
1991 }  // namespace internal
1992 
1993 // Provides a back insert iterator for RepeatedPtrField instances,
1994 // similar to std::back_inserter().
1995 template <typename T>
1996 internal::RepeatedPtrFieldBackInsertIterator<T> RepeatedPtrFieldBackInserter(
1997     RepeatedPtrField<T>* const mutable_field) {
1998   return internal::RepeatedPtrFieldBackInsertIterator<T>(mutable_field);
1999 }
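// Example (illustrative sketch; `msg` and its repeated string field `name`
// are hypothetical):
//
//   std::vector<std::string> names = {"a", "b"};
//   std::copy(names.begin(), names.end(),
//             RepeatedPtrFieldBackInserter(msg.mutable_name()));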
2000 
2001 // Special back insert iterator for RepeatedPtrField instances, just in
2002 // case someone wants to write generic template code that can access both
2003 // RepeatedFields and RepeatedPtrFields using a common name.
2004 template <typename T>
2005 internal::RepeatedPtrFieldBackInsertIterator<T> RepeatedFieldBackInserter(
2006     RepeatedPtrField<T>* const mutable_field) {
2007   return internal::RepeatedPtrFieldBackInsertIterator<T>(mutable_field);
2008 }
2009 
2010 // Provides a back insert iterator for RepeatedPtrField instances,
2011 // similar to std::back_inserter(), that transfers ownership of each
2012 // inserted pointer to the field.
2013 template <typename T>
2014 internal::AllocatedRepeatedPtrFieldBackInsertIterator<T>
2015 AllocatedRepeatedPtrFieldBackInserter(
2016     RepeatedPtrField<T>* const mutable_field) {
2017   return internal::AllocatedRepeatedPtrFieldBackInsertIterator<T>(
2018       mutable_field);
2019 }
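// Example (illustrative sketch): transferring ownership of heap-allocated
// elements into a field that is not on an arena. After the copy the field
// owns the pointers, so the caller must not delete them.
//
//   std::vector<std::string*> owned = {new std::string("a"),
//                                      new std::string("b")};
//   RepeatedPtrField<std::string> field;
//   std::copy(owned.begin(), owned.end(),
//             AllocatedRepeatedPtrFieldBackInserter(&field));
//   owned.clear();  // the field now owns the strings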
2020 
2021 // Similar to AllocatedRepeatedPtrFieldBackInserter, using
2022 // UnsafeArenaAddAllocated instead of AddAllocated.
2023 // This is slightly faster if that matters. It is also useful in legacy code
2024 // that uses temporary ownership to avoid copies. Example:
2025 //   RepeatedPtrField<T> temp_field;
2026 //   temp_field.UnsafeArenaAddAllocated(new T);
2027 //   ... // Do something with temp_field
2028 //   temp_field.UnsafeArenaExtractSubrange(0, temp_field.size(), nullptr);
2029 // Putting temp_field on the arena fails because ownership transfers to the
2030 // arena at the "AddAllocated" call and is never released afterwards, causing
2031 // a double delete. This function uses UnsafeArenaAddAllocated to prevent this.
2032 template <typename T>
2033 internal::UnsafeArenaAllocatedRepeatedPtrFieldBackInsertIterator<T>
2034 UnsafeArenaAllocatedRepeatedPtrFieldBackInserter(
2035     RepeatedPtrField<T>* const mutable_field) {
2036   return internal::UnsafeArenaAllocatedRepeatedPtrFieldBackInsertIterator<T>(
2037       mutable_field);
2038 }
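// Example (illustrative sketch): the temporary-ownership pattern from the
// comment above, expressed with this inserter. `items` is a hypothetical
// container of T* whose ownership stays with the caller throughout.
//
//   RepeatedPtrField<T> temp_field;
//   std::copy(items.begin(), items.end(),
//             UnsafeArenaAllocatedRepeatedPtrFieldBackInserter(&temp_field));
//   ... // Do something with temp_field.
//   temp_field.UnsafeArenaExtractSubrange(0, temp_field.size(), nullptr);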
2039 
2040 
2041 namespace internal {
2042 // Size optimization for `memswap<N>`: the `N` supplied below is the one used
2043 // by every `RepeatedPtrField<T>`, so it is explicitly instantiated only once.
2044 extern template PROTOBUF_EXPORT_TEMPLATE_DECLARE void
2045 memswap<ArenaOffsetHelper<RepeatedPtrFieldBase>::value>(
2046     char* PROTOBUF_RESTRICT, char* PROTOBUF_RESTRICT);
2047 }  // namespace internal
2048 
2049 }  // namespace protobuf
2050 }  // namespace google
2051 
2052 #include "google/protobuf/port_undef.inc"
2053 
2054 #endif  // GOOGLE_PROTOBUF_REPEATED_PTR_FIELD_H__
2055