// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc.  All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
//     * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

// Author: kenton@google.com (Kenton Varda)
//  Based on original Protocol Buffers design by
//  Sanjay Ghemawat, Jeff Dean, and others.
//
// RepeatedField and RepeatedPtrField are used by generated protocol message
// classes to manipulate repeated fields.  These classes are very similar to
// STL's vector, but include a number of optimizations found to be useful
// specifically in the case of Protocol Buffers.  RepeatedPtrField is
// particularly different from STL vector as it manages ownership of the
// pointers that it contains.
//
// This header covers RepeatedField.
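//
// A minimal sketch of the distinction (illustrative only, not generated API):
//
//   RepeatedField<int32_t> ids;            // stores elements inline
//   RepeatedPtrField<std::string> names;   // owns the pointed-to strings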

#ifndef GOOGLE_PROTOBUF_REPEATED_FIELD_H__
#define GOOGLE_PROTOBUF_REPEATED_FIELD_H__


#include <algorithm>
#include <iterator>
#include <limits>
#include <string>
#include <type_traits>
#include <utility>

#include <google/protobuf/stubs/logging.h>
#include <google/protobuf/stubs/common.h>
#include <google/protobuf/arena.h>
#include <google/protobuf/port.h>
#include <google/protobuf/message_lite.h>
#include <google/protobuf/repeated_ptr_field.h>


// Must be included last.
#include <google/protobuf/port_def.inc>

#ifdef SWIG
#error "You cannot SWIG proto headers"
#endif

namespace google {
namespace protobuf {

class Message;

namespace internal {

template <typename T, int kRepHeaderSize>
constexpr int RepeatedFieldLowerClampLimit() {
  // The header is padded to be at least `sizeof(T)` when it would be smaller
  // otherwise.
  static_assert(sizeof(T) <= kRepHeaderSize, "");
  // We want to pad the minimum size to be a power of two bytes, including the
  // header.
  // The first allocation is kRepHeaderSize bytes worth of elements for a total
  // of 2*kRepHeaderSize bytes.
  // For an 8-byte header, we allocate 8 bool, 2 ints, or 1 int64.
  return kRepHeaderSize / sizeof(T);
}

// kRepeatedFieldUpperClampLimit is the lowest signed integer value that
// overflows when multiplied by 2 (which is undefined behavior). Sizes above
// this will clamp to the maximum int value instead of following exponential
// growth when growing a repeated field.
constexpr int kRepeatedFieldUpperClampLimit =
    (std::numeric_limits<int>::max() / 2) + 1;

template <typename Iter>
inline int CalculateReserve(Iter begin, Iter end, std::forward_iterator_tag) {
  return static_cast<int>(std::distance(begin, end));
}

template <typename Iter>
inline int CalculateReserve(Iter /*begin*/, Iter /*end*/,
                            std::input_iterator_tag /*unused*/) {
  return -1;
}

template <typename Iter>
inline int CalculateReserve(Iter begin, Iter end) {
  typedef typename std::iterator_traits<Iter>::iterator_category Category;
  return CalculateReserve(begin, end, Category());
}

// Swaps two blocks of memory of size sizeof(T).
template <typename T>
inline void SwapBlock(char* p, char* q) {
  T tmp;
  memcpy(&tmp, p, sizeof(T));
  memcpy(p, q, sizeof(T));
  memcpy(q, &tmp, sizeof(T));
}

// Swaps two blocks of memory of size kSize:
//  template <int kSize> void memswap(char* p, char* q);
template <int kSize>
inline typename std::enable_if<(kSize == 0), void>::type memswap(char*, char*) {
}

#define PROTO_MEMSWAP_DEF_SIZE(reg_type, max_size)                           \
  template <int kSize>                                                       \
  typename std::enable_if<(kSize >= sizeof(reg_type) && kSize < (max_size)), \
                          void>::type                                        \
  memswap(char* p, char* q) {                                                \
    SwapBlock<reg_type>(p, q);                                               \
    memswap<kSize - sizeof(reg_type)>(p + sizeof(reg_type),                  \
                                      q + sizeof(reg_type));                 \
  }

PROTO_MEMSWAP_DEF_SIZE(uint8_t, 2)
PROTO_MEMSWAP_DEF_SIZE(uint16_t, 4)
PROTO_MEMSWAP_DEF_SIZE(uint32_t, 8)

#ifdef __SIZEOF_INT128__
PROTO_MEMSWAP_DEF_SIZE(uint64_t, 16)
PROTO_MEMSWAP_DEF_SIZE(__uint128_t, (1u << 31))
#else
PROTO_MEMSWAP_DEF_SIZE(uint64_t, (1u << 31))
#endif

#undef PROTO_MEMSWAP_DEF_SIZE
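
// For illustration (a sketch of how the recursion unrolls): memswap<12>
// swaps 8 bytes via SwapBlock<uint64_t>, then recurses as memswap<4>, which
// swaps the remaining 4 bytes via SwapBlock<uint32_t> before reaching the
// kSize == 0 base case.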

template <typename Element>
class RepeatedIterator;

}  // namespace internal

// RepeatedField is used to represent repeated fields of a primitive type (in
// other words, everything except strings and nested Messages).  Most users will
// not ever use a RepeatedField directly; they will use the get-by-index,
// set-by-index, and add accessors that are generated for all repeated fields.
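//
// A minimal usage sketch when the type is used directly (the variable name is
// illustrative only):
//
//   RepeatedField<int32_t> weights;
//   weights.Add(42);                 // append an element
//   weights.Set(0, 7);               // overwrite an existing element
//   int32_t first = weights.Get(0);  // read by index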
template <typename Element>
class RepeatedField final {
  static_assert(
      alignof(Arena) >= alignof(Element),
      "We only support types that have an alignment smaller than Arena");

 public:
  constexpr RepeatedField();
  explicit RepeatedField(Arena* arena);

  RepeatedField(const RepeatedField& other);

  template <typename Iter,
            typename = typename std::enable_if<std::is_constructible<
                Element, decltype(*std::declval<Iter>())>::value>::type>
  RepeatedField(Iter begin, Iter end);

  ~RepeatedField();

  RepeatedField& operator=(const RepeatedField& other);

  RepeatedField(RepeatedField&& other) noexcept;
  RepeatedField& operator=(RepeatedField&& other) noexcept;

  bool empty() const;
  int size() const;

  const Element& Get(int index) const;
  Element* Mutable(int index);

  const Element& operator[](int index) const { return Get(index); }
  Element& operator[](int index) { return *Mutable(index); }

  const Element& at(int index) const;
  Element& at(int index);

  void Set(int index, const Element& value);
  void Add(const Element& value);
  // Appends a new element and returns a pointer to it.
  // The new element is uninitialized if |Element| is a POD type.
  Element* Add();
  // Appends elements in the range [begin, end) after reserving
  // the appropriate number of elements.
  template <typename Iter>
  void Add(Iter begin, Iter end);

  // Removes the last element in the array.
  void RemoveLast();

  // Extracts elements with indices in "[start .. start+num-1]".
  // Copies them into "elements[0 .. num-1]" if "elements" is not nullptr.
  // Caution: also moves elements with indices [start+num ..].
  // Calling this routine inside a loop can cause quadratic behavior.
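  // E.g. (a worked sketch): if the field holds {10, 20, 30, 40, 50}, then
  // ExtractSubrange(1, 2, out) copies {20, 30} into out and leaves the field
  // holding {10, 40, 50}.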
  void ExtractSubrange(int start, int num, Element* elements);

  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear();
  void MergeFrom(const RepeatedField& other);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void CopyFrom(const RepeatedField& other);

  // Replaces the contents with RepeatedField(begin, end).
  template <typename Iter>
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Assign(Iter begin, Iter end);

  // Reserves space to expand the field to at least the given size.  If the
  // array is grown, it will always be at least doubled in size.
  void Reserve(int new_size);

  // Resizes the RepeatedField to a new, smaller size.  This is O(1).
  void Truncate(int new_size);

  void AddAlreadyReserved(const Element& value);
  // Appends a new element and returns a pointer to it.
  // The new element is uninitialized if |Element| is a POD type.
  // Should be called only if Capacity() > Size().
  Element* AddAlreadyReserved();
  Element* AddNAlreadyReserved(int elements);
  int Capacity() const;

  // Like STL resize.  Uses value to fill appended elements.
  // Like Truncate() if new_size <= size(), otherwise this is
  // O(new_size - size()).
  void Resize(int new_size, const Element& value);

  // Gets the underlying array.  This pointer is possibly invalidated by
  // any add or remove operation.
  Element* mutable_data();
  const Element* data() const;

  // Swaps the entire contents with "other". If the two fields are on separate
  // arenas, this copies the data between the arenas.
  void Swap(RepeatedField* other);

  // Swaps entire contents with "other". Should be called only if the caller can
  // guarantee that both repeated fields are on the same arena or are on the
  // heap. Swapping between different arenas is disallowed and caught by a
  // GOOGLE_DCHECK (see API docs for details).
  void UnsafeArenaSwap(RepeatedField* other);

  // Swaps two elements.
  void SwapElements(int index1, int index2);

  // STL-like iterator support
  typedef internal::RepeatedIterator<Element> iterator;
  typedef internal::RepeatedIterator<const Element> const_iterator;
  typedef Element value_type;
  typedef value_type& reference;
  typedef const value_type& const_reference;
  typedef value_type* pointer;
  typedef const value_type* const_pointer;
  typedef int size_type;
  typedef ptrdiff_t difference_type;

  iterator begin();
  const_iterator begin() const;
  const_iterator cbegin() const;
  iterator end();
  const_iterator end() const;
  const_iterator cend() const;

  // Reverse iterator support
  typedef std::reverse_iterator<const_iterator> const_reverse_iterator;
  typedef std::reverse_iterator<iterator> reverse_iterator;
  reverse_iterator rbegin() { return reverse_iterator(end()); }
  const_reverse_iterator rbegin() const {
    return const_reverse_iterator(end());
  }
  reverse_iterator rend() { return reverse_iterator(begin()); }
  const_reverse_iterator rend() const {
    return const_reverse_iterator(begin());
  }

  // Returns the number of bytes used by the repeated field, excluding
  // sizeof(*this)
  size_t SpaceUsedExcludingSelfLong() const;

  int SpaceUsedExcludingSelf() const {
    return internal::ToIntSize(SpaceUsedExcludingSelfLong());
  }

  // Removes the element referenced by position.
  //
  // Returns an iterator to the element immediately following the removed
  // element.
  //
  // Invalidates all iterators at or after the removed element, including end().
  iterator erase(const_iterator position);

  // Removes the elements in the range [first, last).
  //
  // Returns an iterator to the element immediately following the removed range.
  //
  // Invalidates all iterators at or after the removed range, including end().
  iterator erase(const_iterator first, const_iterator last);

  // Gets the Arena on which this RepeatedField stores its elements.
  inline Arena* GetArena() const {
    return GetOwningArena();
  }

  // For internal use only.
  //
  // This is public due to it being called by generated code.
  inline void InternalSwap(RepeatedField* other);

 private:
  template <typename T> friend class Arena::InternalHelper;

  // Gets the Arena on which this RepeatedField stores its elements.
  inline Arena* GetOwningArena() const {
    return (total_size_ == 0) ? static_cast<Arena*>(arena_or_elements_)
                              : rep()->arena;
  }

  static constexpr int kInitialSize = 0;
  // A note on the representation here (see also comment below for
  // RepeatedPtrFieldBase's struct Rep):
  //
  // We maintain the same sizeof(RepeatedField) as before we added arena support
  // so that we do not degrade performance by bloating memory usage. Directly
  // adding an arena_ element to RepeatedField is quite costly. By using
  // indirection in this way, we keep the same size when the RepeatedField is
  // empty (common case), and add only an 8-byte header to the elements array
  // when non-empty. We make sure to place the size fields directly in the
  // RepeatedField class to avoid costly cache misses due to the indirection.
  int current_size_;
  int total_size_;
  // Pad the Rep after the arena pointer to allow for power-of-two byte sizes
  // when sizeof(Element) > sizeof(Arena*), e.g. for 16-byte objects.
  static constexpr size_t kRepHeaderSize =
      sizeof(Arena*) < sizeof(Element) ? sizeof(Element) : sizeof(Arena*);
  struct Rep {
    Arena* arena;
    Element* elements() {
      return reinterpret_cast<Element*>(reinterpret_cast<char*>(this) +
                                        kRepHeaderSize);
    }
  };
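
  // Illustrative layout of a non-empty field (a sketch, assuming the header
  // is exactly sizeof(Arena*), i.e. Element is no larger than a pointer):
  //
  //   rep() -> | Arena* arena | e0 | e1 | e2 | ...
  //                           ^ elements() == arena_or_elements_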

  // If total_size_ == 0, this points to an Arena; otherwise it points to the
  // elements member of a Rep struct. Using this invariant allows the storage
  // of the arena pointer without an extra allocation in the constructor.
  void* arena_or_elements_;

  // Returns a pointer to elements array.
  // pre-condition: the array must have been allocated.
  Element* elements() const {
    GOOGLE_DCHECK_GT(total_size_, 0);
    // Because of above pre-condition this cast is safe.
    return unsafe_elements();
  }

  // Returns a pointer to elements array if it exists; otherwise either null or
  // an invalid pointer is returned. This only happens for empty repeated
  // fields, where you can't dereference this pointer anyway (it's empty).
  Element* unsafe_elements() const {
    return static_cast<Element*>(arena_or_elements_);
  }

  // Returns a pointer to the Rep struct.
  // pre-condition: the Rep must have been allocated, i.e. elements() is safe.
  Rep* rep() const {
    return reinterpret_cast<Rep*>(reinterpret_cast<char*>(elements()) -
                                  kRepHeaderSize);
  }

  friend class Arena;
  typedef void InternalArenaConstructable_;

  // Moves the contents of |from| into |to|, possibly clobbering |from| in the
  // process.  For primitive types this is just a memcpy(), but it could be
  // specialized for non-primitive types to, say, swap each element instead.
  void MoveArray(Element* to, Element* from, int size);

  // Copies the elements of |from| into |to|.
  void CopyArray(Element* to, const Element* from, int size);

  // Internal helper to delete all elements and deallocate the storage.
  void InternalDeallocate(Rep* rep, int size, bool in_destructor) {
    if (rep != nullptr) {
      Element* e = &rep->elements()[0];
      if (!std::is_trivial<Element>::value) {
        Element* limit = &rep->elements()[size];
        for (; e < limit; e++) {
          e->~Element();
        }
      }
      const size_t bytes = size * sizeof(*e) + kRepHeaderSize;
      if (rep->arena == nullptr) {
        internal::SizedDelete(rep, bytes);
      } else if (!in_destructor) {
        // If we are in the destructor, we might be being destroyed as part of
        // the arena teardown. We can't try and return blocks to the arena then.
        rep->arena->ReturnArrayMemory(rep, bytes);
      }
    }
  }

  // This class is a performance wrapper around the RepeatedField::Add(const T&)
  // function. In general, unless a RepeatedField is a local stack variable,
  // LLVM has a hard time optimizing Add. The machine code tends to be
  // loop:
  // mov %size, dword ptr [%repeated_field]       // load
  // cmp %size, dword ptr [%repeated_field + 4]
  // jae fallback
  // mov %buffer, qword ptr [%repeated_field + 8]
  // mov dword [%buffer + %size * 4], %value
  // inc %size                                    // increment
  // mov dword ptr [%repeated_field], %size       // store
  // jmp loop
  //
  // This puts a load/store in each iteration on the important loop variable
  // size. It's a pretty bad compile that happens even in simple cases, but
  // largely it is the presence of the fallback path that disturbs the
  // compiler's mem-to-reg analysis.
  //
  // This class takes ownership of a repeated field for the duration of its
  // lifetime. The repeated field should not be accessed during this time,
  // i.e. only access through this class is allowed. This class should always
  // be a function-local stack variable. Intended use:
  //
  // void AddSequence(const int* begin, const int* end, RepeatedField<int>* out)
  // {
  //   RepeatedFieldAdder<int> adder(out);  // Take ownership of out
  //   for (auto it = begin; it != end; ++it) {
  //     adder.Add(*it);
  //   }
  // }
  //
  // Typically, due to the fact that adder is a local stack variable, the
  // compiler will be successful in mem-to-reg transformation and the machine
  // code will be
  // loop:
  // cmp %size, %capacity
  // jae fallback
  // mov dword ptr [%buffer + %size * 4], %val
  // inc %size
  // jmp loop
  //
  // The first version executes at 7 cycles per iteration while the second
  // version executes at only 1 or 2 cycles.
  template <int = 0, bool = std::is_trivial<Element>::value>
  class FastAdderImpl {
   public:
    explicit FastAdderImpl(RepeatedField* rf) : repeated_field_(rf) {
      index_ = repeated_field_->current_size_;
      capacity_ = repeated_field_->total_size_;
      buffer_ = repeated_field_->unsafe_elements();
    }
    ~FastAdderImpl() { repeated_field_->current_size_ = index_; }

    void Add(Element val) {
      if (index_ == capacity_) {
        repeated_field_->current_size_ = index_;
        repeated_field_->Reserve(index_ + 1);
        capacity_ = repeated_field_->total_size_;
        buffer_ = repeated_field_->unsafe_elements();
      }
      buffer_[index_++] = val;
    }

   private:
    RepeatedField* repeated_field_;
    int index_;
    int capacity_;
    Element* buffer_;

    GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(FastAdderImpl);
  };

  // FastAdder is a wrapper for adding fields. The specialization above handles
  // POD types more efficiently than RepeatedField.
  template <int I>
  class FastAdderImpl<I, false> {
   public:
    explicit FastAdderImpl(RepeatedField* rf) : repeated_field_(rf) {}
    void Add(const Element& val) { repeated_field_->Add(val); }

   private:
    RepeatedField* repeated_field_;
    GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(FastAdderImpl);
  };

  using FastAdder = FastAdderImpl<>;

  friend class TestRepeatedFieldHelper;
  friend class ::google::protobuf::internal::ParseContext;
};

namespace internal {

// This is a helper template to copy an array of elements efficiently when they
// have a trivial copy constructor, and correctly otherwise. This really
// shouldn't be necessary, but our compiler doesn't optimize std::copy very
// effectively.
template <typename Element,
          bool HasTrivialCopy = std::is_trivial<Element>::value>
struct ElementCopier {
  void operator()(Element* to, const Element* from, int array_size);
};

}  // namespace internal

// implementation ====================================================

template <typename Element>
constexpr RepeatedField<Element>::RepeatedField()
    : current_size_(0), total_size_(0), arena_or_elements_(nullptr) {}

template <typename Element>
inline RepeatedField<Element>::RepeatedField(Arena* arena)
    : current_size_(0), total_size_(0), arena_or_elements_(arena) {}

template <typename Element>
inline RepeatedField<Element>::RepeatedField(const RepeatedField& other)
    : current_size_(0), total_size_(0), arena_or_elements_(nullptr) {
  if (other.current_size_ != 0) {
    Reserve(other.size());
    AddNAlreadyReserved(other.size());
    CopyArray(Mutable(0), &other.Get(0), other.size());
  }
}

template <typename Element>
template <typename Iter, typename>
RepeatedField<Element>::RepeatedField(Iter begin, Iter end)
    : current_size_(0), total_size_(0), arena_or_elements_(nullptr) {
  Add(begin, end);
}

template <typename Element>
RepeatedField<Element>::~RepeatedField() {
#ifndef NDEBUG
  // Try to trigger segfault / asan failure in non-opt builds if arena_
  // lifetime has ended before the destructor.
  auto arena = GetArena();
  if (arena) (void)arena->SpaceAllocated();
#endif
  if (total_size_ > 0) {
    InternalDeallocate(rep(), total_size_, true);
  }
}

template <typename Element>
inline RepeatedField<Element>& RepeatedField<Element>::operator=(
    const RepeatedField& other) {
  if (this != &other) CopyFrom(other);
  return *this;
}

template <typename Element>
inline RepeatedField<Element>::RepeatedField(RepeatedField&& other) noexcept
    : RepeatedField() {
#ifdef PROTOBUF_FORCE_COPY_IN_MOVE
  CopyFrom(other);
#else   // PROTOBUF_FORCE_COPY_IN_MOVE
  // We don't just call Swap(&other) here because it would perform 3 copies if
  // other is on an arena. This field can't be on an arena because arena
  // construction always uses the Arena* accepting constructor.
  if (other.GetArena()) {
    CopyFrom(other);
  } else {
    InternalSwap(&other);
  }
#endif  // !PROTOBUF_FORCE_COPY_IN_MOVE
}

template <typename Element>
inline RepeatedField<Element>& RepeatedField<Element>::operator=(
    RepeatedField&& other) noexcept {
  // We don't just call Swap(&other) here because it would perform 3 copies if
  // the two fields are on different arenas.
  if (this != &other) {
    if (GetArena() != other.GetArena()
#ifdef PROTOBUF_FORCE_COPY_IN_MOVE
        || GetArena() == nullptr
#endif  // !PROTOBUF_FORCE_COPY_IN_MOVE
    ) {
      CopyFrom(other);
    } else {
      InternalSwap(&other);
    }
  }
  return *this;
}

template <typename Element>
inline bool RepeatedField<Element>::empty() const {
  return current_size_ == 0;
}

template <typename Element>
inline int RepeatedField<Element>::size() const {
  return current_size_;
}

template <typename Element>
inline int RepeatedField<Element>::Capacity() const {
  return total_size_;
}

template <typename Element>
inline void RepeatedField<Element>::AddAlreadyReserved(const Element& value) {
  GOOGLE_DCHECK_LT(current_size_, total_size_);
  elements()[current_size_++] = value;
}

template <typename Element>
inline Element* RepeatedField<Element>::AddAlreadyReserved() {
  GOOGLE_DCHECK_LT(current_size_, total_size_);
  return &elements()[current_size_++];
}

template <typename Element>
inline Element* RepeatedField<Element>::AddNAlreadyReserved(int elements) {
  GOOGLE_DCHECK_GE(total_size_ - current_size_, elements)
      << total_size_ << ", " << current_size_;
  // Warning: sometimes people call this when elements == 0 and
  // total_size_ == 0. In this case the return pointer points to a zero size
  // array (n == 0). Hence we can just use unsafe_elements(), because the user
  // cannot dereference the pointer anyway.
  Element* ret = unsafe_elements() + current_size_;
  current_size_ += elements;
  return ret;
}

template <typename Element>
inline void RepeatedField<Element>::Resize(int new_size, const Element& value) {
  GOOGLE_DCHECK_GE(new_size, 0);
  if (new_size > current_size_) {
    Reserve(new_size);
    std::fill(&elements()[current_size_], &elements()[new_size], value);
  }
  current_size_ = new_size;
}

template <typename Element>
inline const Element& RepeatedField<Element>::Get(int index) const {
  GOOGLE_DCHECK_GE(index, 0);
  GOOGLE_DCHECK_LT(index, current_size_);
  return elements()[index];
}

template <typename Element>
inline const Element& RepeatedField<Element>::at(int index) const {
  GOOGLE_CHECK_GE(index, 0);
  GOOGLE_CHECK_LT(index, current_size_);
  return elements()[index];
}

template <typename Element>
inline Element& RepeatedField<Element>::at(int index) {
  GOOGLE_CHECK_GE(index, 0);
  GOOGLE_CHECK_LT(index, current_size_);
  return elements()[index];
}

template <typename Element>
inline Element* RepeatedField<Element>::Mutable(int index) {
  GOOGLE_DCHECK_GE(index, 0);
  GOOGLE_DCHECK_LT(index, current_size_);
  return &elements()[index];
}

template <typename Element>
inline void RepeatedField<Element>::Set(int index, const Element& value) {
  GOOGLE_DCHECK_GE(index, 0);
  GOOGLE_DCHECK_LT(index, current_size_);
  elements()[index] = value;
}

template <typename Element>
inline void RepeatedField<Element>::Add(const Element& value) {
  uint32_t size = current_size_;
  if (static_cast<int>(size) == total_size_) {
    // value could reference an element of the array. Reserving new space will
    // invalidate the reference. So we must make a copy first.
    auto tmp = value;
    Reserve(total_size_ + 1);
    elements()[size] = std::move(tmp);
  } else {
    elements()[size] = value;
  }
  current_size_ = size + 1;
}

template <typename Element>
inline Element* RepeatedField<Element>::Add() {
  uint32_t size = current_size_;
  if (static_cast<int>(size) == total_size_) Reserve(total_size_ + 1);
  auto ptr = &elements()[size];
  current_size_ = size + 1;
  return ptr;
}

template <typename Element>
template <typename Iter>
inline void RepeatedField<Element>::Add(Iter begin, Iter end) {
  int reserve = internal::CalculateReserve(begin, end);
  if (reserve != -1) {
    if (reserve == 0) {
      return;
    }

    Reserve(reserve + size());
    // TODO(ckennelly):  The compiler loses track of the buffer freshly
    // allocated by Reserve() by the time we call elements, so it cannot
    // guarantee that elements does not alias [begin(), end()).
    //
    // If restrict is available, annotating the pointer obtained from elements()
    // causes this to lower to memcpy instead of memmove.
    std::copy(begin, end, elements() + size());
    current_size_ = reserve + size();
  } else {
    FastAdder fast_adder(this);
    for (; begin != end; ++begin) fast_adder.Add(*begin);
  }
}

template <typename Element>
inline void RepeatedField<Element>::RemoveLast() {
  GOOGLE_DCHECK_GT(current_size_, 0);
  current_size_--;
}

template <typename Element>
void RepeatedField<Element>::ExtractSubrange(int start, int num,
                                             Element* elements) {
  GOOGLE_DCHECK_GE(start, 0);
  GOOGLE_DCHECK_GE(num, 0);
  GOOGLE_DCHECK_LE(start + num, this->current_size_);

  // Save the values of the removed elements if requested.
  if (elements != nullptr) {
    for (int i = 0; i < num; ++i) elements[i] = this->Get(i + start);
  }

  // Slide remaining elements down to fill the gap.
  if (num > 0) {
    for (int i = start + num; i < this->current_size_; ++i)
      this->Set(i - num, this->Get(i));
    this->Truncate(this->current_size_ - num);
  }
}

template <typename Element>
inline void RepeatedField<Element>::Clear() {
  current_size_ = 0;
}

template <typename Element>
inline void RepeatedField<Element>::MergeFrom(const RepeatedField& other) {
  GOOGLE_DCHECK_NE(&other, this);
  if (other.current_size_ != 0) {
    int existing_size = size();
    Reserve(existing_size + other.size());
    AddNAlreadyReserved(other.size());
    CopyArray(Mutable(existing_size), &other.Get(0), other.size());
  }
}

template <typename Element>
inline void RepeatedField<Element>::CopyFrom(const RepeatedField& other) {
  if (&other == this) return;
  Clear();
  MergeFrom(other);
}

template <typename Element>
template <typename Iter>
inline void RepeatedField<Element>::Assign(Iter begin, Iter end) {
  Clear();
  Add(begin, end);
}

template <typename Element>
inline typename RepeatedField<Element>::iterator RepeatedField<Element>::erase(
    const_iterator position) {
  return erase(position, position + 1);
}

template <typename Element>
inline typename RepeatedField<Element>::iterator RepeatedField<Element>::erase(
    const_iterator first, const_iterator last) {
  size_type first_offset = first - cbegin();
  if (first != last) {
    Truncate(std::copy(last, cend(), begin() + first_offset) - cbegin());
  }
  return begin() + first_offset;
}

template <typename Element>
inline Element* RepeatedField<Element>::mutable_data() {
  return unsafe_elements();
}

template <typename Element>
inline const Element* RepeatedField<Element>::data() const {
  return unsafe_elements();
}

template <typename Element>
inline void RepeatedField<Element>::InternalSwap(RepeatedField* other) {
  GOOGLE_DCHECK(this != other);

  // Swap all fields at once.
  static_assert(std::is_standard_layout<RepeatedField<Element>>::value,
                "offsetof() requires standard layout before c++17");
  internal::memswap<offsetof(RepeatedField, arena_or_elements_) +
                    sizeof(this->arena_or_elements_) -
                    offsetof(RepeatedField, current_size_)>(
      reinterpret_cast<char*>(this) + offsetof(RepeatedField, current_size_),
      reinterpret_cast<char*>(other) + offsetof(RepeatedField, current_size_));
}

template <typename Element>
void RepeatedField<Element>::Swap(RepeatedField* other) {
  if (this == other) return;
#ifdef PROTOBUF_FORCE_COPY_IN_SWAP
  if (GetArena() != nullptr && GetArena() == other->GetArena()) {
#else   // PROTOBUF_FORCE_COPY_IN_SWAP
  if (GetArena() == other->GetArena()) {
#endif  // !PROTOBUF_FORCE_COPY_IN_SWAP
    InternalSwap(other);
  } else {
    RepeatedField<Element> temp(other->GetArena());
    temp.MergeFrom(*this);
    CopyFrom(*other);
    other->UnsafeArenaSwap(&temp);
  }
}

template <typename Element>
void RepeatedField<Element>::UnsafeArenaSwap(RepeatedField* other) {
  if (this == other) return;
  GOOGLE_DCHECK_EQ(GetArena(), other->GetArena());
  InternalSwap(other);
}

template <typename Element>
void RepeatedField<Element>::SwapElements(int index1, int index2) {
  using std::swap;  // enable ADL with fallback
  swap(elements()[index1], elements()[index2]);
}

template <typename Element>
inline typename RepeatedField<Element>::iterator
RepeatedField<Element>::begin() {
  return iterator(unsafe_elements());
}
template <typename Element>
inline typename RepeatedField<Element>::const_iterator
RepeatedField<Element>::begin() const {
  return const_iterator(unsafe_elements());
}
template <typename Element>
inline typename RepeatedField<Element>::const_iterator
RepeatedField<Element>::cbegin() const {
  return const_iterator(unsafe_elements());
}
template <typename Element>
inline typename RepeatedField<Element>::iterator RepeatedField<Element>::end() {
  return iterator(unsafe_elements() + current_size_);
}
template <typename Element>
inline typename RepeatedField<Element>::const_iterator
RepeatedField<Element>::end() const {
  return const_iterator(unsafe_elements() + current_size_);
}
template <typename Element>
inline typename RepeatedField<Element>::const_iterator
RepeatedField<Element>::cend() const {
  return const_iterator(unsafe_elements() + current_size_);
}

template <typename Element>
inline size_t RepeatedField<Element>::SpaceUsedExcludingSelfLong() const {
  return total_size_ > 0 ? (total_size_ * sizeof(Element) + kRepHeaderSize) : 0;
}

namespace internal {
// Returns the new size for a reserved field based on its 'total_size' and the
// requested 'new_size'. The result is clamped to the closed interval:
//   [internal::RepeatedFieldLowerClampLimit<T, kRepHeaderSize>(),
//    std::numeric_limits<int>::max()]
// Requires:
//     new_size > total_size &&
//     (total_size == 0 ||
//      total_size >= RepeatedFieldLowerClampLimit<T, kRepHeaderSize>())
template <typename T, int kRepHeaderSize>
inline int CalculateReserveSize(int total_size, int new_size) {
  constexpr int lower_limit = RepeatedFieldLowerClampLimit<T, kRepHeaderSize>();
  if (new_size < lower_limit) {
    // Clamp to smallest allowed size.
    return lower_limit;
  }
  constexpr int kMaxSizeBeforeClamp =
      (std::numeric_limits<int>::max() - kRepHeaderSize) / 2;
  if (PROTOBUF_PREDICT_FALSE(total_size > kMaxSizeBeforeClamp)) {
    return std::numeric_limits<int>::max();
  }
  // We want to double the number of bytes, not the number of elements, to try
  // to stay within power-of-two allocations.
  // The allocation has kRepHeaderSize + sizeof(T) * capacity.
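  // E.g. (a worked sketch) for T = int32_t with an 8-byte header: doubling
  // from total_size = 4 gives 2 * 4 + 8 / 4 = 10 elements, i.e. an
  // 8 + 10 * 4 = 48-byte allocation, exactly twice the previous
  // 8 + 4 * 4 = 24-byte one.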
  int doubled_size = 2 * total_size + kRepHeaderSize / sizeof(T);
  return std::max(doubled_size, new_size);
}
}  // namespace internal

// Avoid inlining of Reserve(): new, copy, and delete[] lead to a significant
// amount of code bloat.
template <typename Element>
void RepeatedField<Element>::Reserve(int new_size) {
  if (total_size_ >= new_size) return;
  Rep* old_rep = total_size_ > 0 ? rep() : nullptr;
  Rep* new_rep;
  Arena* arena = GetArena();

  new_size = internal::CalculateReserveSize<Element, kRepHeaderSize>(
      total_size_, new_size);

  GOOGLE_DCHECK_LE(
      static_cast<size_t>(new_size),
      (std::numeric_limits<size_t>::max() - kRepHeaderSize) / sizeof(Element))
      << "Requested size is too large to fit into size_t.";
  size_t bytes =
      kRepHeaderSize + sizeof(Element) * static_cast<size_t>(new_size);
  if (arena == nullptr) {
    new_rep = static_cast<Rep*>(::operator new(bytes));
  } else {
    new_rep = reinterpret_cast<Rep*>(Arena::CreateArray<char>(arena, bytes));
  }
  new_rep->arena = arena;
  int old_total_size = total_size_;
  // Already known: new_size >= internal::RepeatedFieldLowerClampLimit().
  // Maintain invariant:
  //     total_size_ == 0 ||
  //     total_size_ >= internal::RepeatedFieldLowerClampLimit()
  total_size_ = new_size;
  arena_or_elements_ = new_rep->elements();
  // Invoke placement-new on newly allocated elements. We shouldn't have to do
  // this, since Element is supposed to be POD, but a previous version of this
  // code allocated storage with "new Element[size]" and some code uses
  // RepeatedField with non-POD types, relying on constructor invocation. If
  // Element has a trivial constructor (e.g., int32_t), gcc (tested with -O2)
  // completely removes this loop because the loop body is empty, so this has no
  // effect unless its side-effects are required for correctness.
  // Note that we do this before MoveArray() below because Element's copy
  // assignment implementation will want an initialized instance first.
  Element* e = &elements()[0];
  Element* limit = e + total_size_;
  for (; e < limit; e++) {
    new (e) Element;
  }
  if (current_size_ > 0) {
    MoveArray(&elements()[0], old_rep->elements(), current_size_);
  }

  // Likewise, we need to invoke destructors on the old array.
  InternalDeallocate(old_rep, old_total_size, false);
}

template <typename Element>
inline void RepeatedField<Element>::Truncate(int new_size) {
  GOOGLE_DCHECK_LE(new_size, current_size_);
  if (current_size_ > 0) {
    current_size_ = new_size;
  }
}

template <typename Element>
inline void RepeatedField<Element>::MoveArray(Element* to, Element* from,
                                              int array_size) {
  CopyArray(to, from, array_size);
}

template <typename Element>
inline void RepeatedField<Element>::CopyArray(Element* to, const Element* from,
                                              int array_size) {
  internal::ElementCopier<Element>()(to, from, array_size);
}

namespace internal {

template <typename Element, bool HasTrivialCopy>
void ElementCopier<Element, HasTrivialCopy>::operator()(Element* to,
                                                        const Element* from,
                                                        int array_size) {
  std::copy(from, from + array_size, to);
}

template <typename Element>
struct ElementCopier<Element, true> {
  void operator()(Element* to, const Element* from, int array_size) {
    memcpy(to, from, static_cast<size_t>(array_size) * sizeof(Element));
  }
};

}  // namespace internal


// -------------------------------------------------------------------

// Iterators and helper functions that follow the spirit of the STL
// std::back_insert_iterator and std::back_inserter but are tailor-made
// for RepeatedField and RepeatedPtrField. Typical usage would be:
//
//   std::copy(some_sequence.begin(), some_sequence.end(),
//             RepeatedFieldBackInserter(proto.mutable_sequence()));
//
// Ported by johannes from util/gtl/proto-array-iterators.h

namespace internal {

// STL-like iterator implementation for RepeatedField.  You should not
// refer to this class directly; use RepeatedField<T>::iterator instead.
//
// Note: All of the iterator operators *must* be inlined to avoid performance
// regressions.  This is caused by the extern template declarations below (which
// are required because of the RepeatedField extern template declarations).  If
// any of these functions aren't explicitly inlined (e.g. defined in the class),
// the compiler isn't allowed to inline them.
template <typename Element>
class RepeatedIterator {
 public:
  using iterator_category = std::random_access_iterator_tag;
  // Note: remove_const is necessary for std::partial_sum, which uses value_type
  // to determine the summation variable type.
  using value_type = typename std::remove_const<Element>::type;
  using difference_type = std::ptrdiff_t;
  using pointer = Element*;
  using reference = Element&;

  constexpr RepeatedIterator() noexcept : it_(nullptr) {}

  // Allows "upcasting" from RepeatedIterator<T> to
  // RepeatedIterator<const T>.
  template <typename OtherElement,
            typename std::enable_if<std::is_convertible<
                OtherElement*, pointer>::value>::type* = nullptr>
  constexpr RepeatedIterator(
      const RepeatedIterator<OtherElement>& other) noexcept
      : it_(other.it_) {}

  // dereferenceable
  constexpr reference operator*() const noexcept { return *it_; }
  constexpr pointer operator->() const noexcept { return it_; }

 private:
  // Helper alias to hide the internal type.
  using iterator = RepeatedIterator<Element>;

 public:
  // {inc,dec}rementable
  iterator& operator++() noexcept {
    ++it_;
    return *this;
  }
  iterator operator++(int) noexcept { return iterator(it_++); }
  iterator& operator--() noexcept {
    --it_;
    return *this;
  }
  iterator operator--(int) noexcept { return iterator(it_--); }

  // equality_comparable
  friend constexpr bool operator==(const iterator& x,
                                   const iterator& y) noexcept {
    return x.it_ == y.it_;
  }
  friend constexpr bool operator!=(const iterator& x,
                                   const iterator& y) noexcept {
    return x.it_ != y.it_;
  }

  // less_than_comparable
  friend constexpr bool operator<(const iterator& x,
                                  const iterator& y) noexcept {
    return x.it_ < y.it_;
  }
  friend constexpr bool operator<=(const iterator& x,
                                   const iterator& y) noexcept {
    return x.it_ <= y.it_;
  }
  friend constexpr bool operator>(const iterator& x,
                                  const iterator& y) noexcept {
    return x.it_ > y.it_;
  }
  friend constexpr bool operator>=(const iterator& x,
                                   const iterator& y) noexcept {
    return x.it_ >= y.it_;
  }

  // addable, subtractable
  iterator& operator+=(difference_type d) noexcept {
    it_ += d;
    return *this;
  }
  constexpr iterator operator+(difference_type d) const noexcept {
    return iterator(it_ + d);
  }
  friend constexpr iterator operator+(const difference_type d,
                                      iterator it) noexcept {
    return it + d;
  }

  iterator& operator-=(difference_type d) noexcept {
    it_ -= d;
    return *this;
  }
  constexpr iterator operator-(difference_type d) const noexcept {
    return iterator(it_ - d);
  }

  // indexable
  constexpr reference operator[](difference_type d) const noexcept {
    return it_[d];
  }

  // random access iterator
  friend constexpr difference_type operator-(iterator it1,
                                             iterator it2) noexcept {
    return it1.it_ - it2.it_;
  }

 private:
  template <typename OtherElement>
  friend class RepeatedIterator;

  // Allow construction from RepeatedField.
  friend class RepeatedField<value_type>;
  explicit RepeatedIterator(Element* it) noexcept : it_(it) {}

  // The internal iterator.
  Element* it_;
};

// A back inserter for RepeatedField objects.
template <typename T>
class RepeatedFieldBackInsertIterator {
 public:
  using iterator_category = std::output_iterator_tag;
  using value_type = T;
  using pointer = void;
  using reference = void;
  using difference_type = std::ptrdiff_t;

  explicit RepeatedFieldBackInsertIterator(
      RepeatedField<T>* const mutable_field)
      : field_(mutable_field) {}
  RepeatedFieldBackInsertIterator<T>& operator=(const T& value) {
    field_->Add(value);
    return *this;
  }
  RepeatedFieldBackInsertIterator<T>& operator*() { return *this; }
  RepeatedFieldBackInsertIterator<T>& operator++() { return *this; }
  RepeatedFieldBackInsertIterator<T>& operator++(int /* unused */) {
    return *this;
  }

 private:
  RepeatedField<T>* field_;
};

}  // namespace internal

// Provides a back insert iterator for RepeatedField instances,
// similar to std::back_inserter().
template <typename T>
internal::RepeatedFieldBackInsertIterator<T> RepeatedFieldBackInserter(
    RepeatedField<T>* const mutable_field) {
  return internal::RepeatedFieldBackInsertIterator<T>(mutable_field);
}

// Extern declarations of common instantiations to reduce library bloat.
extern template class PROTOBUF_EXPORT_TEMPLATE_DECLARE RepeatedField<bool>;
extern template class PROTOBUF_EXPORT_TEMPLATE_DECLARE RepeatedField<int32_t>;
extern template class PROTOBUF_EXPORT_TEMPLATE_DECLARE RepeatedField<uint32_t>;
extern template class PROTOBUF_EXPORT_TEMPLATE_DECLARE RepeatedField<int64_t>;
extern template class PROTOBUF_EXPORT_TEMPLATE_DECLARE RepeatedField<uint64_t>;
extern template class PROTOBUF_EXPORT_TEMPLATE_DECLARE RepeatedField<float>;
extern template class PROTOBUF_EXPORT_TEMPLATE_DECLARE RepeatedField<double>;

namespace internal {
extern template class PROTOBUF_EXPORT_TEMPLATE_DECLARE RepeatedIterator<bool>;
extern template class PROTOBUF_EXPORT_TEMPLATE_DECLARE
    RepeatedIterator<int32_t>;
extern template class PROTOBUF_EXPORT_TEMPLATE_DECLARE
    RepeatedIterator<uint32_t>;
extern template class PROTOBUF_EXPORT_TEMPLATE_DECLARE
    RepeatedIterator<int64_t>;
extern template class PROTOBUF_EXPORT_TEMPLATE_DECLARE
    RepeatedIterator<uint64_t>;
extern template class PROTOBUF_EXPORT_TEMPLATE_DECLARE RepeatedIterator<float>;
extern template class PROTOBUF_EXPORT_TEMPLATE_DECLARE RepeatedIterator<double>;
}  // namespace internal

}  // namespace protobuf
}  // namespace google

#include <google/protobuf/port_undef.inc>

#endif  // GOOGLE_PROTOBUF_REPEATED_FIELD_H__