// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
//     * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

// Author: kenton@google.com (Kenton Varda)
//  Based on original Protocol Buffers design by
//  Sanjay Ghemawat, Jeff Dean, and others.
//
// RepeatedField and RepeatedPtrField are used by generated protocol message
// classes to manipulate repeated fields.  These classes are very similar to
// STL's vector, but include a number of optimizations found to be useful
// specifically in the case of Protocol Buffers.  RepeatedPtrField is
// particularly different from STL vector as it manages ownership of the
// pointers that it contains.
//
// This header covers RepeatedPtrField.
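//
// Quick usage sketch (illustrative only; any element type with a suitable
// type handler, such as a generated message or std::string, works the same
// way):
//
//   RepeatedPtrField<std::string> field;
//   field.Add()->assign("foo");       // allocate and fill a new element
//   *field.Add() = "bar";
//   for (const std::string& s : field) {
//     // iterate like a standard container
//   }
//   field.Clear();                    // elements are cached for later reuse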

// IWYU pragma: private, include "net/proto2/public/repeated_field.h"

#ifndef GOOGLE_PROTOBUF_REPEATED_PTR_FIELD_H__
#define GOOGLE_PROTOBUF_REPEATED_PTR_FIELD_H__

#include <utility>

#ifdef _MSC_VER
// This is required for min/max on VS2013 only.
#include <algorithm>
#endif

#include <iterator>
#include <limits>
#include <string>
#include <type_traits>

#include <google/protobuf/stubs/logging.h>
#include <google/protobuf/stubs/common.h>
#include <google/protobuf/arena.h>
#include <google/protobuf/port.h>
#include <google/protobuf/message_lite.h>

// Must be included last.
#include <google/protobuf/port_def.inc>

#ifdef SWIG
#error "You cannot SWIG proto headers"
#endif

namespace google {
namespace protobuf {

class Message;
class Reflection;

template <typename T>
struct WeakRepeatedPtrField;

namespace internal {

class MergePartialFromCodedStreamHelper;
class SwapFieldHelper;

}  // namespace internal

namespace internal {
template <typename It>
class RepeatedPtrIterator;
template <typename It, typename VoidPtr>
class RepeatedPtrOverPtrsIterator;
}  // namespace internal

namespace internal {

// type-traits helper for RepeatedPtrFieldBase: we only want to invoke
// arena-related "copy if on different arena" behavior if the necessary methods
// exist on the contained type. In particular, we rely on MergeFrom() existing
// as a general proxy for the fact that a copy will work, and we also provide a
// specific override for std::string*.
template <typename T>
struct TypeImplementsMergeBehaviorProbeForMergeFrom {
  typedef char HasMerge;
  typedef long HasNoMerge;

  // We accept either of:
  //   - void MergeFrom(const T& other)
  //   - bool MergeFrom(const T& other)
  //
  // We mangle these names a bit to avoid compatibility issues in 'unclean'
  // include environments that may have, e.g., "#define test ..." (yes, this
  // exists).
  template <typename U, typename RetType, RetType (U::*)(const U& arg)>
  struct CheckType;
  template <typename U>
  static HasMerge Check(CheckType<U, void, &U::MergeFrom>*);
  template <typename U>
  static HasMerge Check(CheckType<U, bool, &U::MergeFrom>*);
  template <typename U>
  static HasNoMerge Check(...);

  // Resolves to either std::true_type or std::false_type.
  typedef std::integral_constant<bool,
                                 (sizeof(Check<T>(0)) == sizeof(HasMerge))>
      type;
};

template <typename T, typename = void>
struct TypeImplementsMergeBehavior
    : TypeImplementsMergeBehaviorProbeForMergeFrom<T> {};

template <>
struct TypeImplementsMergeBehavior<std::string> {
  typedef std::true_type type;
};

template <typename T>
struct IsMovable
    : std::integral_constant<bool, std::is_move_constructible<T>::value &&
                                       std::is_move_assignable<T>::value> {};

// This is the common base class for RepeatedPtrFields.  It deals only in void*
// pointers.  Users should not use this interface directly.
//
// The methods of this interface correspond to the methods of RepeatedPtrField,
// but may have a template argument called TypeHandler.  Its signature is:
//   class TypeHandler {
//    public:
//     typedef MyType Type;
//     static Type* New(Arena* arena);
//     static Type* NewFromPrototype(const Type* prototype,
//                                   Arena* arena);
//     static void Delete(Type*, Arena* arena);
//     static void Clear(Type*);
//     static void Merge(const Type& from, Type* to);
//
//     // Only needs to be implemented if SpaceUsedExcludingSelf() is called.
//     static size_t SpaceUsedLong(const Type&);
//   };
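//
// For illustration only -- a minimal handler for a hypothetical non-message
// type `MyPod` might look like the sketch below.  The handlers actually used
// by this header are GenericTypeHandler<> and StringTypeHandler, defined
// further down.
//
//   class MyPodHandler {
//    public:
//     typedef MyPod Type;
//     static MyPod* New(Arena* arena) { return Arena::Create<MyPod>(arena); }
//     static MyPod* NewFromPrototype(const MyPod*, Arena* arena) {
//       return New(arena);
//     }
//     static void Delete(MyPod* value, Arena* arena) {
//       if (arena == nullptr) delete value;
//     }
//     static void Clear(MyPod* value) { *value = MyPod(); }
//     static void Merge(const MyPod& from, MyPod* to) { *to = from; }
//   };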
class PROTOBUF_EXPORT RepeatedPtrFieldBase {
 protected:
  constexpr RepeatedPtrFieldBase()
      : arena_(nullptr), current_size_(0), total_size_(0), rep_(nullptr) {}
  explicit RepeatedPtrFieldBase(Arena* arena)
      : arena_(arena), current_size_(0), total_size_(0), rep_(nullptr) {}

  RepeatedPtrFieldBase(const RepeatedPtrFieldBase&) = delete;
  RepeatedPtrFieldBase& operator=(const RepeatedPtrFieldBase&) = delete;

  ~RepeatedPtrFieldBase() {
#ifndef NDEBUG
    // Try to trigger a segfault / asan failure in non-opt builds if arena_'s
    // lifetime has ended before the destructor.
    if (arena_) (void)arena_->SpaceAllocated();
#endif
  }

  bool empty() const { return current_size_ == 0; }
  int size() const { return current_size_; }
  int Capacity() const { return total_size_; }

  template <typename TypeHandler>
  const typename TypeHandler::Type& at(int index) const {
    GOOGLE_CHECK_GE(index, 0);
    GOOGLE_CHECK_LT(index, current_size_);
    return *cast<TypeHandler>(rep_->elements[index]);
  }

  template <typename TypeHandler>
  typename TypeHandler::Type& at(int index) {
    GOOGLE_CHECK_GE(index, 0);
    GOOGLE_CHECK_LT(index, current_size_);
    return *cast<TypeHandler>(rep_->elements[index]);
  }

  template <typename TypeHandler>
  typename TypeHandler::Type* Mutable(int index) {
    GOOGLE_DCHECK_GE(index, 0);
    GOOGLE_DCHECK_LT(index, current_size_);
    return cast<TypeHandler>(rep_->elements[index]);
  }

  template <typename TypeHandler>
  typename TypeHandler::Type* Add(
      const typename TypeHandler::Type* prototype = nullptr) {
    if (rep_ != nullptr && current_size_ < rep_->allocated_size) {
      return cast<TypeHandler>(rep_->elements[current_size_++]);
    }
    typename TypeHandler::Type* result =
        TypeHandler::NewFromPrototype(prototype, arena_);
    return reinterpret_cast<typename TypeHandler::Type*>(
        AddOutOfLineHelper(result));
  }

  template <
      typename TypeHandler,
      typename std::enable_if<TypeHandler::Movable::value>::type* = nullptr>
  inline void Add(typename TypeHandler::Type&& value) {
    if (rep_ != nullptr && current_size_ < rep_->allocated_size) {
      *cast<TypeHandler>(rep_->elements[current_size_++]) = std::move(value);
      return;
    }
    if (!rep_ || rep_->allocated_size == total_size_) {
      Reserve(total_size_ + 1);
    }
    ++rep_->allocated_size;
    typename TypeHandler::Type* result =
        TypeHandler::New(arena_, std::move(value));
    rep_->elements[current_size_++] = result;
  }

  template <typename TypeHandler>
  void Delete(int index) {
    GOOGLE_DCHECK_GE(index, 0);
    GOOGLE_DCHECK_LT(index, current_size_);
    TypeHandler::Delete(cast<TypeHandler>(rep_->elements[index]), arena_);
  }

  // Must be called from destructor.
  template <typename TypeHandler>
  void Destroy() {
    if (rep_ != nullptr && arena_ == nullptr) {
      int n = rep_->allocated_size;
      void* const* elements = rep_->elements;
      for (int i = 0; i < n; i++) {
        TypeHandler::Delete(cast<TypeHandler>(elements[i]), nullptr);
      }
      const size_t size = total_size_ * sizeof(elements[0]) + kRepHeaderSize;
      internal::SizedDelete(rep_, size);
    }
    rep_ = nullptr;
  }

  bool NeedsDestroy() const { return rep_ != nullptr && arena_ == nullptr; }
  void DestroyProtos();  // implemented in the cc file

 public:
  // The next few methods are public so that they can be called from generated
  // code when implicit weak fields are used, but they should never be called
  // by application code.

  template <typename TypeHandler>
  const typename TypeHandler::Type& Get(int index) const {
    GOOGLE_DCHECK_GE(index, 0);
    GOOGLE_DCHECK_LT(index, current_size_);
    return *cast<TypeHandler>(rep_->elements[index]);
  }

  // Creates and adds an element using the given prototype, without introducing
  // a link-time dependency on the concrete message type.  This method is used
  // to implement implicit weak fields.  The prototype may be nullptr, in which
  // case an ImplicitWeakMessage will be used as a placeholder.
  MessageLite* AddWeak(const MessageLite* prototype);

  template <typename TypeHandler>
  void Clear() {
    const int n = current_size_;
    GOOGLE_DCHECK_GE(n, 0);
    if (n > 0) {
      void* const* elements = rep_->elements;
      int i = 0;
      do {
        TypeHandler::Clear(cast<TypeHandler>(elements[i++]));
      } while (i < n);
      current_size_ = 0;
    }
  }

  template <typename TypeHandler>
  void MergeFrom(const RepeatedPtrFieldBase& other) {
    // To avoid unnecessary code duplication and reduce binary size, we use a
    // layered approach to implementing MergeFrom().  The toplevel method is
    // templated, so we get a small thunk per concrete message type in the
    // binary.  This calls a shared implementation with most of the logic,
    // passing a function pointer to another type-specific piece of code that
    // calls the object-allocate and merge handlers.
    GOOGLE_DCHECK_NE(&other, this);
    if (other.current_size_ == 0) return;
    MergeFromInternal(other,
                      &RepeatedPtrFieldBase::MergeFromInnerLoop<TypeHandler>);
  }

  inline void InternalSwap(RepeatedPtrFieldBase* rhs) {
    GOOGLE_DCHECK(this != rhs);

    // Swap all fields at once.
    auto temp = std::make_tuple(rhs->arena_, rhs->current_size_,
                                rhs->total_size_, rhs->rep_);
    std::tie(rhs->arena_, rhs->current_size_, rhs->total_size_, rhs->rep_) =
        std::make_tuple(arena_, current_size_, total_size_, rep_);
    std::tie(arena_, current_size_, total_size_, rep_) = temp;
  }

 protected:
  template <typename TypeHandler>
  void RemoveLast() {
    GOOGLE_DCHECK_GT(current_size_, 0);
    TypeHandler::Clear(cast<TypeHandler>(rep_->elements[--current_size_]));
  }

  template <typename TypeHandler>
  void CopyFrom(const RepeatedPtrFieldBase& other) {
    if (&other == this) return;
    RepeatedPtrFieldBase::Clear<TypeHandler>();
    RepeatedPtrFieldBase::MergeFrom<TypeHandler>(other);
  }

  void CloseGap(int start, int num);  // implemented in the cc file

  void Reserve(int new_size);  // implemented in the cc file

  template <typename TypeHandler>
  static inline typename TypeHandler::Type* copy(
      typename TypeHandler::Type* value) {
    auto* new_value = TypeHandler::NewFromPrototype(value, nullptr);
    TypeHandler::Merge(*value, new_value);
    return new_value;
  }

  // Used for constructing iterators.
  void* const* raw_data() const { return rep_ ? rep_->elements : nullptr; }
  void** raw_mutable_data() const {
    return rep_ ? const_cast<void**>(rep_->elements) : nullptr;
  }

  template <typename TypeHandler>
  typename TypeHandler::Type** mutable_data() {
    // TODO(kenton):  Breaks C++ aliasing rules.  We should probably remove
    // this method entirely.
    return reinterpret_cast<typename TypeHandler::Type**>(raw_mutable_data());
  }

  template <typename TypeHandler>
  const typename TypeHandler::Type* const* data() const {
    // TODO(kenton):  Breaks C++ aliasing rules.  We should probably remove
    // this method entirely.
    return reinterpret_cast<const typename TypeHandler::Type* const*>(
        raw_data());
  }

  template <typename TypeHandler>
  PROTOBUF_NDEBUG_INLINE void Swap(RepeatedPtrFieldBase* other) {
#ifdef PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetArena() != nullptr && GetArena() == other->GetArena())
#else   // PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetArena() == other->GetArena())
#endif  // !PROTOBUF_FORCE_COPY_IN_SWAP
    {
      InternalSwap(other);
    } else {
      SwapFallback<TypeHandler>(other);
    }
  }

  void SwapElements(int index1, int index2) {
    using std::swap;  // enable ADL with fallback
    swap(rep_->elements[index1], rep_->elements[index2]);
  }

  template <typename TypeHandler>
  size_t SpaceUsedExcludingSelfLong() const {
    size_t allocated_bytes = static_cast<size_t>(total_size_) * sizeof(void*);
    if (rep_ != nullptr) {
      for (int i = 0; i < rep_->allocated_size; ++i) {
        allocated_bytes +=
            TypeHandler::SpaceUsedLong(*cast<TypeHandler>(rep_->elements[i]));
      }
      allocated_bytes += kRepHeaderSize;
    }
    return allocated_bytes;
  }

  // Advanced memory management --------------------------------------
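  //
  // The "cleared object" pool lives in the slots between current_size_ and
  // rep_->allocated_size: Clear() and RemoveLast() only reset elements and
  // shrink current_size_, so a later Add() can reuse an existing element
  // instead of allocating.  Illustrative sketch via the public wrapper:
  //
  //   RepeatedPtrField<std::string> f;
  //   f.Add()->assign("x");  // allocates one element
  //   f.Clear();             // size() == 0, but the element is kept around
  //   f.Add();               // reuses the cleared element; no new allocation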

  // Like Add(), but if there are no cleared objects to use, returns nullptr.
  template <typename TypeHandler>
  typename TypeHandler::Type* AddFromCleared() {
    if (rep_ != nullptr && current_size_ < rep_->allocated_size) {
      return cast<TypeHandler>(rep_->elements[current_size_++]);
    } else {
      return nullptr;
    }
  }

  template <typename TypeHandler>
  void AddAllocated(typename TypeHandler::Type* value) {
    typename TypeImplementsMergeBehavior<typename TypeHandler::Type>::type t;
    AddAllocatedInternal<TypeHandler>(value, t);
  }

  template <typename TypeHandler>
  void UnsafeArenaAddAllocated(typename TypeHandler::Type* value) {
    // Make room for the new pointer.
    if (!rep_ || current_size_ == total_size_) {
      // The array is completely full with no cleared objects, so grow it.
      Reserve(total_size_ + 1);
      ++rep_->allocated_size;
    } else if (rep_->allocated_size == total_size_) {
      // There is no more space in the pointer array because it contains some
      // cleared objects awaiting reuse.  We don't want to grow the array in
      // this case because otherwise a loop calling AddAllocated() followed by
      // Clear() would leak memory.
      TypeHandler::Delete(cast<TypeHandler>(rep_->elements[current_size_]),
                          arena_);
    } else if (current_size_ < rep_->allocated_size) {
      // We have some cleared objects.  We don't care about their order, so we
      // can just move the first one to the end to make space.
      rep_->elements[rep_->allocated_size] = rep_->elements[current_size_];
      ++rep_->allocated_size;
    } else {
      // There are no cleared objects.
      ++rep_->allocated_size;
    }

    rep_->elements[current_size_++] = value;
  }

  template <typename TypeHandler>
  PROTOBUF_NODISCARD typename TypeHandler::Type* ReleaseLast() {
    typename TypeImplementsMergeBehavior<typename TypeHandler::Type>::type t;
    return ReleaseLastInternal<TypeHandler>(t);
  }

  // Releases and returns the last element, but does not do out-of-arena copy.
  // Instead, just returns the raw pointer to the contained element in the
  // arena.
  template <typename TypeHandler>
  typename TypeHandler::Type* UnsafeArenaReleaseLast() {
    GOOGLE_DCHECK_GT(current_size_, 0);
    typename TypeHandler::Type* result =
        cast<TypeHandler>(rep_->elements[--current_size_]);
    --rep_->allocated_size;
    if (current_size_ < rep_->allocated_size) {
      // There are cleared elements on the end; replace the removed element
      // with the last allocated element.
      rep_->elements[current_size_] = rep_->elements[rep_->allocated_size];
    }
    return result;
  }

  int ClearedCount() const {
    return rep_ ? (rep_->allocated_size - current_size_) : 0;
  }

  template <typename TypeHandler>
  void AddCleared(typename TypeHandler::Type* value) {
    GOOGLE_DCHECK(GetArena() == nullptr)
        << "AddCleared() can only be used on a "
           "RepeatedPtrField not on an arena.";
    GOOGLE_DCHECK(TypeHandler::GetOwningArena(value) == nullptr)
        << "AddCleared() can only accept values not on an arena.";
    if (!rep_ || rep_->allocated_size == total_size_) {
      Reserve(total_size_ + 1);
    }
    rep_->elements[rep_->allocated_size++] = value;
  }

  template <typename TypeHandler>
  PROTOBUF_NODISCARD typename TypeHandler::Type* ReleaseCleared() {
    GOOGLE_DCHECK(GetArena() == nullptr)
        << "ReleaseCleared() can only be used on a RepeatedPtrField not on "
        << "an arena.";
    GOOGLE_DCHECK(rep_ != nullptr);
    GOOGLE_DCHECK_GT(rep_->allocated_size, current_size_);
    return cast<TypeHandler>(rep_->elements[--rep_->allocated_size]);
  }

  template <typename TypeHandler>
  void AddAllocatedInternal(typename TypeHandler::Type* value,
                            std::true_type) {
    // AddAllocated version that implements arena-safe copying behavior.
    Arena* element_arena =
        reinterpret_cast<Arena*>(TypeHandler::GetOwningArena(value));
    Arena* arena = GetArena();
    if (arena == element_arena && rep_ && rep_->allocated_size < total_size_) {
      // Fast path: underlying arena representation (tagged pointer) is equal
      // to our arena pointer, and we can add to array without resizing it (at
      // least one slot that is not allocated).
      void** elems = rep_->elements;
      if (current_size_ < rep_->allocated_size) {
        // Make space at [current] by moving first allocated element to end of
        // allocated list.
        elems[rep_->allocated_size] = elems[current_size_];
      }
      elems[current_size_] = value;
      current_size_ = current_size_ + 1;
      rep_->allocated_size = rep_->allocated_size + 1;
    } else {
      AddAllocatedSlowWithCopy<TypeHandler>(value, element_arena, arena);
    }
  }

  template <typename TypeHandler>
  void AddAllocatedInternal(
      // AddAllocated version that does not implement arena-safe copying
      // behavior.
      typename TypeHandler::Type* value, std::false_type) {
    if (rep_ && rep_->allocated_size < total_size_) {
      // Fast path: we can add to the array without resizing it (there is at
      // least one slot that is not allocated).
      void** elems = rep_->elements;
      if (current_size_ < rep_->allocated_size) {
        // Make space at [current] by moving first allocated element to end of
        // allocated list.
        elems[rep_->allocated_size] = elems[current_size_];
      }
      elems[current_size_] = value;
      current_size_ = current_size_ + 1;
      ++rep_->allocated_size;
    } else {
      UnsafeArenaAddAllocated<TypeHandler>(value);
    }
  }

  // Slowpath handles all cases, copying if necessary.
  template <typename TypeHandler>
  PROTOBUF_NOINLINE void AddAllocatedSlowWithCopy(
      // Pass value_arena and my_arena to avoid duplicate virtual call (value)
      // or load (mine).
      typename TypeHandler::Type* value, Arena* value_arena, Arena* my_arena) {
    // Ensure that either the value is in the same arena, or if not, we do the
    // appropriate thing: Own() it (if it's on heap and we're in an arena) or
    // copy it to our arena/heap (otherwise).
    if (my_arena != nullptr && value_arena == nullptr) {
      my_arena->Own(value);
    } else if (my_arena != value_arena) {
      typename TypeHandler::Type* new_value =
          TypeHandler::NewFromPrototype(value, my_arena);
      TypeHandler::Merge(*value, new_value);
      TypeHandler::Delete(value, value_arena);
      value = new_value;
    }

    UnsafeArenaAddAllocated<TypeHandler>(value);
  }

  template <typename TypeHandler>
  typename TypeHandler::Type* ReleaseLastInternal(std::true_type) {
    // ReleaseLast() for types that implement merge/copy behavior.
    // First, release an element.
    typename TypeHandler::Type* result = UnsafeArenaReleaseLast<TypeHandler>();
    // Now perform a copy if we're on an arena.
    Arena* arena = GetArena();

    typename TypeHandler::Type* new_result;
#ifdef PROTOBUF_FORCE_COPY_IN_RELEASE
    new_result = copy<TypeHandler>(result);
    if (arena == nullptr) delete result;
#else   // PROTOBUF_FORCE_COPY_IN_RELEASE
    new_result = (arena == nullptr) ? result : copy<TypeHandler>(result);
#endif  // !PROTOBUF_FORCE_COPY_IN_RELEASE
    return new_result;
  }

  template <typename TypeHandler>
  typename TypeHandler::Type* ReleaseLastInternal(std::false_type) {
    // ReleaseLast() for types that *do not* implement merge/copy behavior --
    // this is the same as UnsafeArenaReleaseLast().  Note that we
    // GOOGLE_DCHECK-fail if we're on an arena, since the user really should
    // implement the copy operation in this case.
    GOOGLE_DCHECK(GetArena() == nullptr)
        << "ReleaseLast() called on a RepeatedPtrField that is on an arena, "
        << "with a type that does not implement MergeFrom. This is unsafe; "
        << "please implement MergeFrom for your type.";
    return UnsafeArenaReleaseLast<TypeHandler>();
  }

  template <typename TypeHandler>
  PROTOBUF_NOINLINE void SwapFallback(RepeatedPtrFieldBase* other) {
#ifdef PROTOBUF_FORCE_COPY_IN_SWAP
    GOOGLE_DCHECK(GetArena() == nullptr || other->GetArena() != GetArena());
#else   // PROTOBUF_FORCE_COPY_IN_SWAP
    GOOGLE_DCHECK(other->GetArena() != GetArena());
#endif  // !PROTOBUF_FORCE_COPY_IN_SWAP

    // Copy semantics in this case.  We try to improve efficiency by placing
    // the temporary on |other|'s arena so that messages are copied twice
    // rather than three times.
    RepeatedPtrFieldBase temp(other->GetArena());
    temp.MergeFrom<TypeHandler>(*this);
    this->Clear<TypeHandler>();
    this->MergeFrom<TypeHandler>(*other);
    other->InternalSwap(&temp);
    temp.Destroy<TypeHandler>();  // Frees rep_ if `other` had no arena.
  }

  inline Arena* GetArena() const { return arena_; }

 private:
  template <typename T>
  friend class Arena::InternalHelper;

  inline Arena* GetOwningArena() const { return arena_; }

  static constexpr int kInitialSize = 0;
  // A few notes on internal representation:
  //
  // We use an indirected approach, with struct Rep, to keep
  // sizeof(RepeatedPtrFieldBase) equivalent to what it was before arena
  // support was added; namely, 3 8-byte machine words on x86-64.  An instance
  // of Rep is allocated only when the repeated field is non-empty, and it is
  // a dynamically-sized struct (the header is directly followed by
  // elements[]).  We place arena_ and current_size_ directly in the object to
  // avoid cache misses due to the indirection, because these fields are
  // checked frequently.  Placing all fields directly in the
  // RepeatedPtrFieldBase instance would cost significant performance for
  // memory-sensitive workloads.
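  //
  // Sketch of the element-slot invariants maintained by the methods above
  // (derived from ClearedCount(), Add() and friends):
  //
  //   0 <= current_size_ <= rep_->allocated_size <= total_size_
  //   rep_->elements[0 .. current_size_ - 1]      live, visible elements
  //   rep_->elements[current_size_ .. rep_->allocated_size - 1]
  //                                               cleared elements kept for
  //                                               reuse
  //   rep_->elements[rep_->allocated_size .. total_size_ - 1]
  //                                               unused pointer capacity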
  Arena* arena_;
  int current_size_;
  int total_size_;
  struct Rep {
    int allocated_size;
    // Here we declare a huge array as a way of approximating C's "flexible
    // array member" feature without relying on undefined behavior.
    void* elements[(std::numeric_limits<int>::max() - 2 * sizeof(int)) /
                   sizeof(void*)];
  };
  static constexpr size_t kRepHeaderSize = offsetof(Rep, elements);
  Rep* rep_;

  template <typename TypeHandler>
  static inline typename TypeHandler::Type* cast(void* element) {
    return reinterpret_cast<typename TypeHandler::Type*>(element);
  }
  template <typename TypeHandler>
  static inline const typename TypeHandler::Type* cast(const void* element) {
    return reinterpret_cast<const typename TypeHandler::Type*>(element);
  }

  // Non-templated inner function to avoid code duplication.  Takes a function
  // pointer to the type-specific (templated) inner allocate/merge loop.
  void MergeFromInternal(const RepeatedPtrFieldBase& other,
                         void (RepeatedPtrFieldBase::*inner_loop)(void**,
                                                                  void**, int,
                                                                  int)) {
    // Note: wrapper has already guaranteed that other.rep_ != nullptr here.
    int other_size = other.current_size_;
    void** other_elements = other.rep_->elements;
    void** new_elements = InternalExtend(other_size);
    int allocated_elems = rep_->allocated_size - current_size_;
    (this->*inner_loop)(new_elements, other_elements, other_size,
                        allocated_elems);
    current_size_ += other_size;
    if (rep_->allocated_size < current_size_) {
      rep_->allocated_size = current_size_;
    }
  }

  // Merges other_elems to our_elems.
  template <typename TypeHandler>
  PROTOBUF_NOINLINE void MergeFromInnerLoop(void** our_elems,
                                            void** other_elems, int length,
                                            int already_allocated) {
    if (already_allocated < length) {
      Arena* arena = GetArena();
      typename TypeHandler::Type* elem_prototype =
          reinterpret_cast<typename TypeHandler::Type*>(other_elems[0]);
      for (int i = already_allocated; i < length; i++) {
        // Allocate a new empty element that we'll merge into below.
        typename TypeHandler::Type* new_elem =
            TypeHandler::NewFromPrototype(elem_prototype, arena);
        our_elems[i] = new_elem;
      }
    }
    // Main loop that does the actual merging.
    for (int i = 0; i < length; i++) {
      // Already allocated: use existing element.
      typename TypeHandler::Type* other_elem =
          reinterpret_cast<typename TypeHandler::Type*>(other_elems[i]);
      typename TypeHandler::Type* new_elem =
          reinterpret_cast<typename TypeHandler::Type*>(our_elems[i]);
      TypeHandler::Merge(*other_elem, new_elem);
    }
  }

  // Internal helper: extends array space if necessary to contain
  // |extend_amount| more elements, and returns a pointer to the element
  // immediately following the old list of elements.  This interface factors
  // out common behavior from Reserve() and MergeFrom() to reduce code size.
  // |extend_amount| must be > 0.
  void** InternalExtend(int extend_amount);

  // Internal helper for Add: adds "obj" as the next element in the
  // array, including potentially resizing the array with Reserve if
  // needed.
  void* AddOutOfLineHelper(void* obj);

  // The reflection implementation needs to call protected methods directly,
  // reinterpreting pointers as being to Message instead of a specific Message
  // subclass.
  friend class ::PROTOBUF_NAMESPACE_ID::Reflection;
  friend class ::PROTOBUF_NAMESPACE_ID::internal::SwapFieldHelper;

  // ExtensionSet stores repeated message extensions as
  // RepeatedPtrField<MessageLite>, but non-lite ExtensionSets need to
  // implement SpaceUsedLong(), and thus need to call
  // SpaceUsedExcludingSelfLong() reinterpreting MessageLite as Message.
  // ExtensionSet also needs to make use of AddFromCleared(), which is not
  // part of the public interface.
  friend class ExtensionSet;

  // The MapFieldBase implementation needs to call protected methods directly,
  // reinterpreting pointers as being to Message instead of a specific Message
  // subclass.
  friend class MapFieldBase;
  friend class MapFieldBaseStub;

  // The table-driven MergePartialFromCodedStream implementation needs to
  // operate on RepeatedPtrField<MessageLite>.
  friend class MergePartialFromCodedStreamHelper;
  friend class AccessorHelper;
  template <typename T>
  friend struct google::protobuf::WeakRepeatedPtrField;
  friend class internal::TcParser;  // TODO(jorg): Remove this friend.
};

template <typename GenericType>
class GenericTypeHandler {
 public:
  typedef GenericType Type;
  using Movable = IsMovable<GenericType>;

  static inline GenericType* New(Arena* arena) {
    return Arena::CreateMaybeMessage<Type>(arena);
  }
  static inline GenericType* New(Arena* arena, GenericType&& value) {
    return Arena::Create<GenericType>(arena, std::move(value));
  }
  static inline GenericType* NewFromPrototype(const GenericType* prototype,
                                              Arena* arena = nullptr) {
    return New(arena);
  }
  static inline void Delete(GenericType* value, Arena* arena) {
    if (arena == nullptr) {
      delete value;
    }
  }
  static inline Arena* GetOwningArena(GenericType* value) {
    return Arena::GetOwningArena<Type>(value);
  }

  static inline void Clear(GenericType* value) { value->Clear(); }
  static void Merge(const GenericType& from, GenericType* to);
  static inline size_t SpaceUsedLong(const GenericType& value) {
    return value.SpaceUsedLong();
  }
};
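
// For intuition (sketch only): RepeatedPtrField<Element>, declared below,
// forwards each operation to RepeatedPtrFieldBase through a nested
// TypeHandler that derives from GenericTypeHandler<Element> (or from
// StringTypeHandler for std::string), so RepeatedPtrField<Element>::Add() is
// roughly equivalent to
//
//   Element* e = RepeatedPtrFieldBase::Add<GenericTypeHandler<Element>>();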

// NewFromPrototypeHelper() is not defined inline here, as we will need to do
// a virtual function dispatch anyways to go from Message* to call New/Merge.
// (The additional helper is needed as a workaround for MSVC.)
MessageLite* NewFromPrototypeHelper(const MessageLite* prototype,
                                    Arena* arena);

template <>
inline MessageLite* GenericTypeHandler<MessageLite>::NewFromPrototype(
    const MessageLite* prototype, Arena* arena) {
  return NewFromPrototypeHelper(prototype, arena);
}
template <>
inline Arena* GenericTypeHandler<MessageLite>::GetOwningArena(
    MessageLite* value) {
  return value->GetOwningArena();
}

template <typename GenericType>
PROTOBUF_NOINLINE inline void GenericTypeHandler<GenericType>::Merge(
    const GenericType& from, GenericType* to) {
  to->MergeFrom(from);
}
template <>
void GenericTypeHandler<MessageLite>::Merge(const MessageLite& from,
                                            MessageLite* to);

template <>
inline void GenericTypeHandler<std::string>::Clear(std::string* value) {
  value->clear();
}
template <>
void GenericTypeHandler<std::string>::Merge(const std::string& from,
                                            std::string* to);

// Message specialization bodies defined in message.cc.  This split is
// necessary to allow proto2-lite (which includes this header) to be
// independent of Message.
template <>
PROTOBUF_EXPORT Message* GenericTypeHandler<Message>::NewFromPrototype(
    const Message* prototype, Arena* arena);
template <>
PROTOBUF_EXPORT Arena* GenericTypeHandler<Message>::GetOwningArena(
    Message* value);

class StringTypeHandler {
 public:
  typedef std::string Type;
  using Movable = IsMovable<Type>;

  static inline std::string* New(Arena* arena) {
    return Arena::Create<std::string>(arena);
  }
  static inline std::string* New(Arena* arena, std::string&& value) {
    return Arena::Create<std::string>(arena, std::move(value));
  }
  static inline std::string* NewFromPrototype(const std::string*,
                                              Arena* arena) {
    return New(arena);
  }
  static inline Arena* GetOwningArena(std::string*) { return nullptr; }
  static inline void Delete(std::string* value, Arena* arena) {
    if (arena == nullptr) {
      delete value;
    }
  }
  static inline void Clear(std::string* value) { value->clear(); }
  static inline void Merge(const std::string& from, std::string* to) {
    *to = from;
  }
  static size_t SpaceUsedLong(const std::string& value) {
    return sizeof(value) + StringSpaceUsedExcludingSelfLong(value);
  }
};

}  // namespace internal

// RepeatedPtrField is like RepeatedField, but used for repeated strings or
// Messages.
template <typename Element>
class RepeatedPtrField final : private internal::RepeatedPtrFieldBase {
 public:
  constexpr RepeatedPtrField();
  explicit RepeatedPtrField(Arena* arena);

  RepeatedPtrField(const RepeatedPtrField& other);

  template <typename Iter,
            typename = typename std::enable_if<std::is_constructible<
                Element, decltype(*std::declval<Iter>())>::value>::type>
  RepeatedPtrField(Iter begin, Iter end);

  ~RepeatedPtrField();

  RepeatedPtrField& operator=(const RepeatedPtrField& other);

  RepeatedPtrField(RepeatedPtrField&& other) noexcept;
  RepeatedPtrField& operator=(RepeatedPtrField&& other) noexcept;

  bool empty() const;
  int size() const;

  const Element& Get(int index) const;
  Element* Mutable(int index);
  Element* Add();
  void Add(Element&& value);
  // Append elements in the range [begin, end) after reserving
  // the appropriate number of elements.
  template <typename Iter>
  void Add(Iter begin, Iter end);

  const Element& operator[](int index) const { return Get(index); }
  Element& operator[](int index) { return *Mutable(index); }

  const Element& at(int index) const;
  Element& at(int index);

  // Removes the last element in the array.
  // Ownership of the element is retained by the array.
  void RemoveLast();

  // Deletes elements with indices in the range [start .. start+num-1].
  // Caution: moves all elements with indices [start+num .. ].
  // Calling this routine inside a loop can cause quadratic behavior.
  void DeleteSubrange(int start, int num);

  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear();
  void MergeFrom(const RepeatedPtrField& other);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void CopyFrom(const RepeatedPtrField& other);

  // Replaces the contents with RepeatedPtrField(begin, end).
  template <typename Iter>
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Assign(Iter begin, Iter end);

  // Reserves space to expand the field to at least the given size.  This only
  // resizes the pointer array; it doesn't allocate any objects.  If the
  // array is grown, it will always be at least doubled in size.
  void Reserve(int new_size);

  int Capacity() const;

  // Gets the underlying array.  This pointer is possibly invalidated by
  // any add or remove operation.
  //
  // This API is deprecated.  Instead of working with the element array
  // directly, use the APIs in repeated_field_util.h (e.g. for sorting).
  PROTOBUF_DEPRECATED_MSG("Use APIs in repeated_field_util.h")
  Element** mutable_data();
  const Element* const* data() const;

  // Swaps entire contents with "other".  If they are on separate arenas, then
  // copies data.
  void Swap(RepeatedPtrField* other);

  // Swaps entire contents with "other".  Caller should guarantee that either
  // both fields are on the same arena or both are on the heap.  Swapping
  // between different arenas with this function is disallowed and is caught
  // via GOOGLE_DCHECK.
  void UnsafeArenaSwap(RepeatedPtrField* other);

  // Swaps two elements.
  void SwapElements(int index1, int index2);

  // STL-like iterator support.
  typedef internal::RepeatedPtrIterator<Element> iterator;
  typedef internal::RepeatedPtrIterator<const Element> const_iterator;
  typedef Element value_type;
  typedef value_type& reference;
  typedef const value_type& const_reference;
  typedef value_type* pointer;
  typedef const value_type* const_pointer;
  typedef int size_type;
  typedef ptrdiff_t difference_type;

  iterator begin();
  const_iterator begin() const;
  const_iterator cbegin() const;
  iterator end();
  const_iterator end() const;
  const_iterator cend() const;

  // Reverse iterator support.
  typedef std::reverse_iterator<const_iterator> const_reverse_iterator;
  typedef std::reverse_iterator<iterator> reverse_iterator;
  reverse_iterator rbegin() { return reverse_iterator(end()); }
  const_reverse_iterator rbegin() const {
    return const_reverse_iterator(end());
  }
  reverse_iterator rend() { return reverse_iterator(begin()); }
  const_reverse_iterator rend() const {
    return const_reverse_iterator(begin());
  }

  // Custom STL-like iterator that iterates over and returns the underlying
  // pointers to Element rather than Element itself.
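  //
  // For example, to reorder the owned pointers without copying the elements
  // themselves (sketch; assumes <algorithm> is available to the caller):
  //
  //   RepeatedPtrField<std::string> field;  // ... filled via Add() ...
  //   std::sort(field.pointer_begin(), field.pointer_end(),
  //             [](const std::string* a, const std::string* b) {
  //               return *a < *b;
  //             });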
  typedef internal::RepeatedPtrOverPtrsIterator<Element*, void*>
      pointer_iterator;
  typedef internal::RepeatedPtrOverPtrsIterator<const Element* const,
                                                const void* const>
      const_pointer_iterator;
  pointer_iterator pointer_begin();
  const_pointer_iterator pointer_begin() const;
  pointer_iterator pointer_end();
  const_pointer_iterator pointer_end() const;

  // Returns (an estimate of) the number of bytes used by the repeated field,
  // excluding sizeof(*this).
  size_t SpaceUsedExcludingSelfLong() const;

  int SpaceUsedExcludingSelf() const {
    return internal::ToIntSize(SpaceUsedExcludingSelfLong());
  }

  // Advanced memory management --------------------------------------
  // When hardcore memory management becomes necessary -- as it sometimes
  // does here at Google -- the following methods may be useful.

  // Adds an already-allocated object, passing ownership to the
  // RepeatedPtrField.
  //
  // Note that some special behavior occurs with respect to arenas:
  //
  //   (i) if this field holds submessages, the new submessage will be copied
  //   if the original is in an arena and this RepeatedPtrField is either in
  //   a different arena, or on the heap.
  //   (ii) if this field holds strings, the passed-in string *must* be
  //   heap-allocated, not arena-allocated.  There is no way to dynamically
  //   check this at runtime, so User Beware.
  void AddAllocated(Element* value);

  // Removes and returns the last element, passing ownership to the caller.
  // Requires:  size() > 0
  //
  // If this RepeatedPtrField is on an arena, an object copy is required to
  // pass ownership back to the user (for compatible semantics).  Use
  // UnsafeArenaReleaseLast() if this behavior is undesired.
  PROTOBUF_NODISCARD Element* ReleaseLast();

  // Adds an already-allocated object, skipping arena-ownership checks.  The
  // user must guarantee that the given object is in the same arena as this
  // RepeatedPtrField.
  // It is also useful in legacy code that uses temporary ownership to avoid
  // copies.  Example:
  //   RepeatedPtrField<T> temp_field;
  //   temp_field.UnsafeArenaAddAllocated(new T);
  //   ... // Do something with temp_field
  //   temp_field.UnsafeArenaExtractSubrange(0, temp_field.size(), nullptr);
  // If you put temp_field on the arena this fails, because the ownership
  // transfers to the arena at the "AddAllocated" call and is not released
  // anymore, causing a double delete.  UnsafeArenaAddAllocated prevents this.
  void UnsafeArenaAddAllocated(Element* value);

  // Removes and returns the last element.  Unlike ReleaseLast, the returned
  // pointer is always to the original object.  This may be in an arena, in
  // which case it would have the arena's lifetime.
  // Requires: current_size_ > 0
  Element* UnsafeArenaReleaseLast();
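
  // A short ownership sketch (illustrative only; `MyMsg` stands in for any
  // generated message type, and the field is heap-backed, not on an arena):
  //
  //   RepeatedPtrField<MyMsg> field;
  //   field.AddAllocated(new MyMsg);          // field now owns the object
  //   MyMsg* released = field.ReleaseLast();  // ownership back to the caller
  //   delete released;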

  // Extracts elements with indices in the range "[start .. start+num-1]".
  // The caller assumes ownership of the extracted elements and is responsible
  // for deleting them when they are no longer needed.
  // If "elements" is non-nullptr, then pointers to the extracted elements
  // are stored in "elements[0 .. num-1]" for the convenience of the caller.
  // If "elements" is nullptr, then the caller must use some other mechanism
  // to perform any further operations (like deletion) on these elements.
  // Caution: implementation also moves elements with indices [start+num ..].
  // Calling this routine inside a loop can cause quadratic behavior.
  //
  // Memory copying behavior is identical to ReleaseLast(), described above:
  // if this RepeatedPtrField is on an arena, an object copy is performed for
  // each returned element, so that all returned element pointers are to
  // heap-allocated copies.  If this copy is not desired, the user should call
  // UnsafeArenaExtractSubrange().
  void ExtractSubrange(int start, int num, Element** elements);

  // Identical to ExtractSubrange() described above, except that no object
  // copies are ever performed.  Instead, the raw object pointers are
  // returned.  Thus, if on an arena, the returned objects must not be freed,
  // because they will not be heap-allocated objects.
  void UnsafeArenaExtractSubrange(int start, int num, Element** elements);

  // When elements are removed by calls to RemoveLast() or Clear(), they
  // are not actually freed.  Instead, they are cleared and kept so that
  // they can be reused later.  This can save lots of CPU time when
  // repeatedly reusing a protocol message for similar purposes.
  //
  // Hardcore programs may choose to manipulate these cleared objects
  // to better optimize memory management using the following routines.

  // Gets the number of cleared objects that are currently being kept
  // around for reuse.
  int ClearedCount() const;
#ifndef PROTOBUF_FUTURE_BREAKING_CHANGES
  // Adds an element to the pool of cleared objects, passing ownership to
  // the RepeatedPtrField.  The element must be cleared prior to calling
  // this method.
  //
  // This method cannot be called when either the repeated field or |value|
  // is on an arena; both cases will trigger a GOOGLE_DCHECK-failure.
  void AddCleared(Element* value);
  // Removes and returns a single element from the cleared pool, passing
  // ownership to the caller.  The element is guaranteed to be cleared.
  // Requires:  ClearedCount() > 0
  //
  // This method cannot be called when the repeated field is on an arena;
  // doing so will trigger a GOOGLE_DCHECK-failure.
  PROTOBUF_NODISCARD Element* ReleaseCleared();
#endif  // !PROTOBUF_FUTURE_BREAKING_CHANGES

  // Removes the element referenced by position.
  //
  // Returns an iterator to the element immediately following the removed
  // element.
  //
  // Invalidates all iterators at or after the removed element, including
  // end().
  iterator erase(const_iterator position);

  // Removes the elements in the range [first, last).
  //
  // Returns an iterator to the element immediately following the removed
  // range.
  //
  // Invalidates all iterators at or after the removed range, including end().
  iterator erase(const_iterator first, const_iterator last);

  // Gets the arena on which this RepeatedPtrField stores its elements.
  inline Arena* GetArena() const;

  // For internal use only.
  //
  // This is public due to it being called by generated code.
  void InternalSwap(RepeatedPtrField* other) {
    internal::RepeatedPtrFieldBase::InternalSwap(other);
  }

 private:
  // Note:  RepeatedPtrField SHOULD NOT be subclassed by users.
  class TypeHandler;
  // Implementations for ExtractSubrange().  The copying behavior must be
  // included only if the type supports the necessary operations (e.g.,
  // MergeFrom()), so we must resolve this at compile time.  ExtractSubrange()
  // uses SFINAE to choose one of the below implementations.
  void ExtractSubrangeInternal(int start, int num, Element** elements,
                               std::true_type);
  void ExtractSubrangeInternal(int start, int num, Element** elements,
                               std::false_type);

  friend class Arena;

  template <typename T>
  friend struct WeakRepeatedPtrField;

  typedef void InternalArenaConstructable_;
};

// -------------------------------------------------------------------

template <typename Element>
class RepeatedPtrField<Element>::TypeHandler
    : public internal::GenericTypeHandler<Element> {};

template <>
class RepeatedPtrField<std::string>::TypeHandler
    : public internal::StringTypeHandler {};

template <typename Element>
constexpr RepeatedPtrField<Element>::RepeatedPtrField()
    : RepeatedPtrFieldBase() {}

template <typename Element>
inline RepeatedPtrField<Element>::RepeatedPtrField(Arena* arena)
    : RepeatedPtrFieldBase(arena) {}

template <typename Element>
inline RepeatedPtrField<Element>::RepeatedPtrField(
    const RepeatedPtrField& other)
    : RepeatedPtrFieldBase() {
  MergeFrom(other);
}

template <typename Element>
template <typename Iter, typename>
inline RepeatedPtrField<Element>::RepeatedPtrField(Iter begin, Iter end) {
  Add(begin, end);
}

template <typename Element>
RepeatedPtrField<Element>::~RepeatedPtrField() {
#ifdef __cpp_if_constexpr
  if constexpr (std::is_base_of<MessageLite, Element>::value) {
#else
  if (std::is_base_of<MessageLite, Element>::value) {
#endif
    if (NeedsDestroy()) DestroyProtos();
  } else {
    Destroy<TypeHandler>();
  }
}

template <typename Element>
inline RepeatedPtrField<Element>& RepeatedPtrField<Element>::operator=(
    const RepeatedPtrField& other) {
  if (this != &other) CopyFrom(other);
  return *this;
}

template <typename Element>
inline RepeatedPtrField<Element>::RepeatedPtrField(
    RepeatedPtrField&& other) noexcept
    : RepeatedPtrField() {
#ifdef PROTOBUF_FORCE_COPY_IN_MOVE
  CopyFrom(other);
#else   // PROTOBUF_FORCE_COPY_IN_MOVE
  // We don't just call Swap(&other) here because it would perform 3 copies
  // if other is on an arena.  This field can't be on an arena because arena
  // construction always uses the Arena* accepting constructor.
  if (other.GetArena()) {
    CopyFrom(other);
  } else {
    InternalSwap(&other);
  }
#endif  // !PROTOBUF_FORCE_COPY_IN_MOVE
}

template <typename Element>
inline RepeatedPtrField<Element>& RepeatedPtrField<Element>::operator=(
    RepeatedPtrField&& other) noexcept {
  // We don't just call Swap(&other) here because it would perform 3 copies
  // if the two fields are on different arenas.
  if (this != &other) {
    if (GetArena() != other.GetArena()
#ifdef PROTOBUF_FORCE_COPY_IN_MOVE
        || GetArena() == nullptr
#endif  // !PROTOBUF_FORCE_COPY_IN_MOVE
    ) {
      CopyFrom(other);
    } else {
      InternalSwap(&other);
    }
  }
  return *this;
}

template <typename Element>
inline bool RepeatedPtrField<Element>::empty() const {
  return RepeatedPtrFieldBase::empty();
}

template <typename Element>
inline int RepeatedPtrField<Element>::size() const {
  return RepeatedPtrFieldBase::size();
}

template <typename Element>
inline const Element& RepeatedPtrField<Element>::Get(int index) const {
  return RepeatedPtrFieldBase::Get<TypeHandler>(index);
}

template <typename Element>
inline const Element& RepeatedPtrField<Element>::at(int index) const {
  return RepeatedPtrFieldBase::at<TypeHandler>(index);
}

template <typename Element>
inline Element& RepeatedPtrField<Element>::at(int index) {
  return RepeatedPtrFieldBase::at<TypeHandler>(index);
}

template <typename Element>
inline Element* RepeatedPtrField<Element>::Mutable(int index) {
  return RepeatedPtrFieldBase::Mutable<TypeHandler>(index);
}

template <typename Element>
inline Element* RepeatedPtrField<Element>::Add() {
  return RepeatedPtrFieldBase::Add<TypeHandler>();
}

template <typename Element>
inline void RepeatedPtrField<Element>::Add(Element&& value) {
  RepeatedPtrFieldBase::Add<TypeHandler>(std::move(value));
}

template <typename Element>
template <typename Iter>
inline void RepeatedPtrField<Element>::Add(Iter begin, Iter end) {
  if (std::is_base_of<
          std::forward_iterator_tag,
          typename std::iterator_traits<Iter>::iterator_category>::value) {
    int reserve = std::distance(begin, end);
    Reserve(size() + reserve);
  }
  for (; begin != end; ++begin) {
    *Add() = *begin;
  }
}

template <typename Element>
inline void RepeatedPtrField<Element>::RemoveLast() {
  RepeatedPtrFieldBase::RemoveLast<TypeHandler>();
}

template <typename Element>
inline void RepeatedPtrField<Element>::DeleteSubrange(int start, int num) {
  GOOGLE_DCHECK_GE(start, 0);
  GOOGLE_DCHECK_GE(num, 0);
  GOOGLE_DCHECK_LE(start + num, size());
  for (int i = 0; i < num; ++i) {
    RepeatedPtrFieldBase::Delete<TypeHandler>(start + i);
  }
  UnsafeArenaExtractSubrange(start, num, nullptr);
}

template <typename Element>
inline void RepeatedPtrField<Element>::ExtractSubrange(int start, int num,
                                                       Element** elements) {
  typename internal::TypeImplementsMergeBehavior<
      typename TypeHandler::Type>::type t;
  ExtractSubrangeInternal(start, num, elements, t);
}

// ExtractSubrange() implementation for types that implement merge/copy
// behavior.
template <typename Element>
inline void RepeatedPtrField<Element>::ExtractSubrangeInternal(
    int start, int num, Element** elements, std::true_type) {
  GOOGLE_DCHECK_GE(start, 0);
  GOOGLE_DCHECK_GE(num, 0);
  GOOGLE_DCHECK_LE(start + num, size());

  if (num == 0) return;

  GOOGLE_DCHECK_NE(elements, nullptr)
      << "Releasing elements without transferring ownership is an unsafe "
         "operation.  Use UnsafeArenaExtractSubrange.";
  if (elements == nullptr) {
    CloseGap(start, num);
    return;
  }

  Arena* arena = GetArena();
#ifdef PROTOBUF_FORCE_COPY_IN_RELEASE
  // Always copy.
  for (int i = 0; i < num; ++i) {
    elements[i] = copy<TypeHandler>(
        RepeatedPtrFieldBase::Mutable<TypeHandler>(i + start));
  }
  if (arena == nullptr) {
    for (int i = 0; i < num; ++i) {
      delete RepeatedPtrFieldBase::Mutable<TypeHandler>(i + start);
    }
  }
#else   // PROTOBUF_FORCE_COPY_IN_RELEASE
  // If we're on an arena, we perform a copy for each element so that the
  // returned elements are heap-allocated.  Otherwise, just forward it.
  if (arena != nullptr) {
    for (int i = 0; i < num; ++i) {
      elements[i] = copy<TypeHandler>(
          RepeatedPtrFieldBase::Mutable<TypeHandler>(i + start));
    }
  } else {
    for (int i = 0; i < num; ++i) {
      elements[i] = RepeatedPtrFieldBase::Mutable<TypeHandler>(i + start);
    }
  }
#endif  // !PROTOBUF_FORCE_COPY_IN_RELEASE
  CloseGap(start, num);
}

// ExtractSubrange() implementation for types that do not implement merge/copy
// behavior.
template <typename Element>
inline void RepeatedPtrField<Element>::ExtractSubrangeInternal(
    int start, int num, Element** elements, std::false_type) {
  // This case is identical to UnsafeArenaExtractSubrange().  However, since
  // ExtractSubrange() must return heap-allocated objects by contract, and we
  // cannot fulfill this contract if we are on an arena, we must
  // GOOGLE_DCHECK() that we are not on an arena.
  GOOGLE_DCHECK(GetArena() == nullptr)
      << "ExtractSubrange() when arena is non-nullptr is only supported when "
      << "the Element type supplies a MergeFrom() operation to make copies.";
  UnsafeArenaExtractSubrange(start, num, elements);
}

template <typename Element>
inline void RepeatedPtrField<Element>::UnsafeArenaExtractSubrange(
    int start, int num, Element** elements) {
  GOOGLE_DCHECK_GE(start, 0);
  GOOGLE_DCHECK_GE(num, 0);
  GOOGLE_DCHECK_LE(start + num, size());

  if (num > 0) {
    // Save the values of the removed elements if requested.
    if (elements != nullptr) {
      for (int i = 0; i < num; ++i) {
        elements[i] = RepeatedPtrFieldBase::Mutable<TypeHandler>(i + start);
      }
    }
    CloseGap(start, num);
  }
}

template <typename Element>
inline void RepeatedPtrField<Element>::Clear() {
  RepeatedPtrFieldBase::Clear<TypeHandler>();
}

template <typename Element>
inline void RepeatedPtrField<Element>::MergeFrom(
    const RepeatedPtrField& other) {
  RepeatedPtrFieldBase::MergeFrom<TypeHandler>(other);
}

template <typename Element>
inline void RepeatedPtrField<Element>::CopyFrom(const RepeatedPtrField& other) {
  RepeatedPtrFieldBase::CopyFrom<TypeHandler>(other);
}

template <typename Element>
template <typename Iter>
inline void RepeatedPtrField<Element>::Assign(Iter begin, Iter end) {
  Clear();
  Add(begin, end);
}

template <typename Element>
inline typename RepeatedPtrField<Element>::iterator
RepeatedPtrField<Element>::erase(const_iterator position) {
  return erase(position, position + 1);
}

template <typename Element>
inline typename RepeatedPtrField<Element>::iterator
RepeatedPtrField<Element>::erase(const_iterator first, const_iterator last) {
  size_type pos_offset = std::distance(cbegin(), first);
  size_type last_offset = std::distance(cbegin(), last);
  DeleteSubrange(pos_offset, last_offset - pos_offset);
  return begin() + pos_offset;
}

template <typename Element>
inline Element** RepeatedPtrField<Element>::mutable_data() {
  return RepeatedPtrFieldBase::mutable_data<TypeHandler>();
}

template <typename Element>
inline const Element* const* RepeatedPtrField<Element>::data() const {
  return RepeatedPtrFieldBase::data<TypeHandler>();
}

template <typename Element>
inline void RepeatedPtrField<Element>::Swap(RepeatedPtrField* other) {
  if (this == other) return;
  RepeatedPtrFieldBase::Swap<TypeHandler>(other);
}

template <typename Element>
inline void RepeatedPtrField<Element>::UnsafeArenaSwap(
    RepeatedPtrField* other) {
  if (this == other) return;
  GOOGLE_DCHECK_EQ(GetArena(), other->GetArena());
  RepeatedPtrFieldBase::InternalSwap(other);
}

template <typename Element>
inline void RepeatedPtrField<Element>::SwapElements(int index1, int index2) {
  RepeatedPtrFieldBase::SwapElements(index1, index2);
}

template <typename Element>
inline Arena* RepeatedPtrField<Element>::GetArena() const {
  return RepeatedPtrFieldBase::GetArena();
}

template <typename Element>
inline size_t RepeatedPtrField<Element>::SpaceUsedExcludingSelfLong() const {
  return RepeatedPtrFieldBase::SpaceUsedExcludingSelfLong<TypeHandler>();
}

template <typename Element>
inline void RepeatedPtrField<Element>::AddAllocated(Element* value) {
  RepeatedPtrFieldBase::AddAllocated<TypeHandler>(value);
}

template <typename Element>
inline void RepeatedPtrField<Element>::UnsafeArenaAddAllocated(Element* value) {
  RepeatedPtrFieldBase::UnsafeArenaAddAllocated<TypeHandler>(value);
}

template <typename Element>
inline Element* RepeatedPtrField<Element>::ReleaseLast() {
  return RepeatedPtrFieldBase::ReleaseLast<TypeHandler>();
}

template <typename Element>
inline Element* RepeatedPtrField<Element>::UnsafeArenaReleaseLast() {
  return RepeatedPtrFieldBase::UnsafeArenaReleaseLast<TypeHandler>();
}

template <typename Element>
inline int RepeatedPtrField<Element>::ClearedCount() const {
  return RepeatedPtrFieldBase::ClearedCount();
}

#ifndef PROTOBUF_FUTURE_BREAKING_CHANGES
template <typename Element>
inline void RepeatedPtrField<Element>::AddCleared(Element* value) {
  return RepeatedPtrFieldBase::AddCleared<TypeHandler>(value);
}

template <typename Element>
inline Element* RepeatedPtrField<Element>::ReleaseCleared() {
  return RepeatedPtrFieldBase::ReleaseCleared<TypeHandler>();
}
#endif  // !PROTOBUF_FUTURE_BREAKING_CHANGES

template <typename Element>
inline void RepeatedPtrField<Element>::Reserve(int new_size) {
  return RepeatedPtrFieldBase::Reserve(new_size);
}

template <typename Element>
inline int RepeatedPtrField<Element>::Capacity() const {
  return RepeatedPtrFieldBase::Capacity();
}

// -------------------------------------------------------------------

namespace internal {

// STL-like iterator implementation for RepeatedPtrField.  You should not
// refer to this class directly; use RepeatedPtrField<T>::iterator instead.
//
// The iterator for RepeatedPtrField<T>, RepeatedPtrIterator<T>, is
// very similar to iterator_ptr<T**> in util/gtl/iterator_adaptors.h,
// but adds random-access operators and is modified to wrap a void** base
// iterator (since RepeatedPtrField stores its array as a void* array and
// casting void** to T** would violate C++ aliasing rules).
//
// This code is based on net/proto/proto-array-internal.h by Jeffrey Yasskin
// (jyasskin@google.com).
template <typename Element>
class RepeatedPtrIterator {
 public:
  using iterator = RepeatedPtrIterator<Element>;
  using iterator_category = std::random_access_iterator_tag;
  using value_type = typename std::remove_const<Element>::type;
  using difference_type = std::ptrdiff_t;
  using pointer = Element*;
  using reference = Element&;

  RepeatedPtrIterator() : it_(nullptr) {}
  explicit RepeatedPtrIterator(void* const* it) : it_(it) {}

  // Allows "upcasting" from RepeatedPtrIterator<T**> to
  // RepeatedPtrIterator<const T*const*>.
  template <typename OtherElement,
            typename std::enable_if<std::is_convertible<
                OtherElement*, pointer>::value>::type* = nullptr>
  RepeatedPtrIterator(const RepeatedPtrIterator<OtherElement>& other)
      : it_(other.it_) {}

  // dereferenceable
  reference operator*() const { return *reinterpret_cast<Element*>(*it_); }
  pointer operator->() const { return &(operator*()); }

  // {inc,dec}rementable
  iterator& operator++() {
    ++it_;
    return *this;
  }
  iterator operator++(int) { return iterator(it_++); }
  iterator& operator--() {
    --it_;
    return *this;
  }
  iterator operator--(int) { return iterator(it_--); }

  // equality_comparable
  friend bool operator==(const iterator& x, const iterator& y) {
    return x.it_ == y.it_;
  }
  friend bool operator!=(const iterator& x, const iterator& y) {
    return x.it_ != y.it_;
  }

  // less_than_comparable
  friend bool operator<(const iterator& x, const iterator& y) {
    return x.it_ < y.it_;
  }
  friend bool operator<=(const iterator& x, const iterator& y) {
    return x.it_ <= y.it_;
  }
  friend bool operator>(const iterator& x, const iterator& y) {
    return x.it_ > y.it_;
  }
  friend bool operator>=(const iterator& x, const iterator& y) {
    return x.it_ >= y.it_;
  }

  // addable, subtractable
  iterator& operator+=(difference_type d) {
    it_ += d;
    return *this;
  }
  friend iterator operator+(iterator it, const difference_type d) {
    it += d;
    return it;
  }
  friend iterator operator+(const difference_type d, iterator it) {
    it += d;
    return it;
  }
  iterator& operator-=(difference_type d) {
    it_ -= d;
    return *this;
  }
  friend iterator operator-(iterator it, difference_type d) {
    it -= d;
    return it;
  }

  // indexable
  reference operator[](difference_type d) const { return *(*this + d); }

  // random access iterator
  friend difference_type operator-(iterator it1, iterator it2) {
    return it1.it_ - it2.it_;
  }

 private:
  template <typename OtherElement>
  friend class RepeatedPtrIterator;

  // The internal iterator.
  void* const* it_;
};

// Provides an iterator that operates on pointers to the underlying objects
// rather than the objects themselves as RepeatedPtrIterator does.
// Consider using this when working with STL algorithms that change
// the array.
// The VoidPtr template parameter holds the type-agnostic pointer value
// referenced by the iterator. It should either be "void *" for a mutable
// iterator, or "const void* const" for a constant iterator.
template <typename Element, typename VoidPtr>
class RepeatedPtrOverPtrsIterator {
 public:
  using iterator = RepeatedPtrOverPtrsIterator<Element, VoidPtr>;
  using iterator_category = std::random_access_iterator_tag;
  using value_type = typename std::remove_const<Element>::type;
  using difference_type = std::ptrdiff_t;
  using pointer = Element*;
  using reference = Element&;

  RepeatedPtrOverPtrsIterator() : it_(nullptr) {}
  explicit RepeatedPtrOverPtrsIterator(VoidPtr* it) : it_(it) {}

  // Allows "upcasting" from RepeatedPtrOverPtrsIterator<T**> to
  // RepeatedPtrOverPtrsIterator<const T*const*>.
  template <
      typename OtherElement, typename OtherVoidPtr,
      typename std::enable_if<
          std::is_convertible<OtherElement*, pointer>::value &&
          std::is_convertible<OtherVoidPtr*, VoidPtr>::value>::type* = nullptr>
  RepeatedPtrOverPtrsIterator(
      const RepeatedPtrOverPtrsIterator<OtherElement, OtherVoidPtr>& other)
      : it_(other.it_) {}

  // dereferenceable
  reference operator*() const { return *reinterpret_cast<Element*>(it_); }
  pointer operator->() const { return &(operator*()); }

  // {inc,dec}rementable
  iterator& operator++() {
    ++it_;
    return *this;
  }
  iterator operator++(int) { return iterator(it_++); }
  iterator& operator--() {
    --it_;
    return *this;
  }
  iterator operator--(int) { return iterator(it_--); }

  // equality_comparable
  friend bool operator==(const iterator& x, const iterator& y) {
    return x.it_ == y.it_;
  }
  friend bool operator!=(const iterator& x, const iterator& y) {
    return x.it_ != y.it_;
  }

  // less_than_comparable
  friend bool operator<(const iterator& x, const iterator& y) {
    return x.it_ < y.it_;
  }
  friend bool operator<=(const iterator& x, const iterator& y) {
    return x.it_ <= y.it_;
  }
  friend bool operator>(const iterator& x, const iterator& y) {
    return x.it_ > y.it_;
  }
  friend bool operator>=(const iterator& x, const iterator& y) {
    return x.it_ >= y.it_;
  }

  // addable, subtractable
  iterator& operator+=(difference_type d) {
    it_ += d;
    return *this;
  }
  friend iterator operator+(iterator it, difference_type d) {
    it += d;
    return it;
  }
  friend iterator operator+(difference_type d, iterator it) {
    it += d;
    return it;
  }
  iterator& operator-=(difference_type d) {
    it_ -= d;
    return *this;
  }
  friend iterator operator-(iterator it, difference_type d) {
    it -= d;
    return it;
  }

  // indexable
  reference operator[](difference_type d) const { return *(*this + d); }

  // random access iterator
  friend difference_type operator-(iterator it1, iterator it2) {
    return it1.it_ - it2.it_;
  }

 private:
  template <typename OtherElement, typename OtherVoidPtr>
  friend class RepeatedPtrOverPtrsIterator;

  // The internal iterator.
  VoidPtr* it_;
};

}  // namespace internal

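// Example (editor's sketch; `MyMessage`, `msg`, and the name() accessor are
// assumed for illustration): RepeatedPtrOverPtrsIterator is the type returned
// by RepeatedPtrField<T>::pointer_begin()/pointer_end(), defined below, which
// lets mutating STL algorithms such as std::sort rearrange the stored pointers
// without copying or destroying the elements themselves.
//
//   RepeatedPtrField<MyMessage>* items = msg.mutable_items();
//   std::sort(items->pointer_begin(), items->pointer_end(),
//             [](const MyMessage* a, const MyMessage* b) {
//               return a->name() < b->name();
//             });
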
template <typename Element>
inline typename RepeatedPtrField<Element>::iterator
RepeatedPtrField<Element>::begin() {
  return iterator(raw_data());
}
template <typename Element>
inline typename RepeatedPtrField<Element>::const_iterator
RepeatedPtrField<Element>::begin() const {
  return iterator(raw_data());
}
template <typename Element>
inline typename RepeatedPtrField<Element>::const_iterator
RepeatedPtrField<Element>::cbegin() const {
  return begin();
}
template <typename Element>
inline typename RepeatedPtrField<Element>::iterator
RepeatedPtrField<Element>::end() {
  return iterator(raw_data() + size());
}
template <typename Element>
inline typename RepeatedPtrField<Element>::const_iterator
RepeatedPtrField<Element>::end() const {
  return iterator(raw_data() + size());
}
template <typename Element>
inline typename RepeatedPtrField<Element>::const_iterator
RepeatedPtrField<Element>::cend() const {
  return end();
}

template <typename Element>
inline typename RepeatedPtrField<Element>::pointer_iterator
RepeatedPtrField<Element>::pointer_begin() {
  return pointer_iterator(raw_mutable_data());
}
template <typename Element>
inline typename RepeatedPtrField<Element>::const_pointer_iterator
RepeatedPtrField<Element>::pointer_begin() const {
  return const_pointer_iterator(const_cast<const void* const*>(raw_data()));
}
template <typename Element>
inline typename RepeatedPtrField<Element>::pointer_iterator
RepeatedPtrField<Element>::pointer_end() {
  return pointer_iterator(raw_mutable_data() + size());
}
template <typename Element>
inline typename RepeatedPtrField<Element>::const_pointer_iterator
RepeatedPtrField<Element>::pointer_end() const {
  return const_pointer_iterator(
      const_cast<const void* const*>(raw_data() + size()));
}

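// Example (editor's sketch; `MyMessage`, `msg`, and the id() accessor are
// assumed for illustration): the begin()/end() iterators defined above make a
// RepeatedPtrField usable with range-based for loops and read-only STL
// algorithms.
//
//   const RepeatedPtrField<MyMessage>& items = msg.items();
//   for (const MyMessage& item : items) {
//     ... // Use item.
//   }
//   auto it = std::find_if(items.begin(), items.end(),
//                          [](const MyMessage& m) { return m.id() == 42; });
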
// Iterators and helper functions that follow the spirit of the STL
// std::back_insert_iterator and std::back_inserter but are tailor-made
// for RepeatedField and RepeatedPtrField. Typical usage would be:
//
//   std::copy(some_sequence.begin(), some_sequence.end(),
//             RepeatedFieldBackInserter(proto.mutable_sequence()));
//
// Ported by johannes from util/gtl/proto-array-iterators.h

namespace internal {

// A back inserter for RepeatedPtrField objects.
template <typename T>
class RepeatedPtrFieldBackInsertIterator {
 public:
  using iterator_category = std::output_iterator_tag;
  using value_type = T;
  using pointer = void;
  using reference = void;
  using difference_type = std::ptrdiff_t;

  RepeatedPtrFieldBackInsertIterator(RepeatedPtrField<T>* const mutable_field)
      : field_(mutable_field) {}
  RepeatedPtrFieldBackInsertIterator<T>& operator=(const T& value) {
    *field_->Add() = value;
    return *this;
  }
  RepeatedPtrFieldBackInsertIterator<T>& operator=(
      const T* const ptr_to_value) {
    *field_->Add() = *ptr_to_value;
    return *this;
  }
  RepeatedPtrFieldBackInsertIterator<T>& operator=(T&& value) {
    *field_->Add() = std::move(value);
    return *this;
  }
  RepeatedPtrFieldBackInsertIterator<T>& operator*() { return *this; }
  RepeatedPtrFieldBackInsertIterator<T>& operator++() { return *this; }
  RepeatedPtrFieldBackInsertIterator<T>& operator++(int /* unused */) {
    return *this;
  }

 private:
  RepeatedPtrField<T>* field_;
};

// A back inserter for RepeatedPtrFields that inserts by transferring ownership
// of a pointer.
template <typename T>
class AllocatedRepeatedPtrFieldBackInsertIterator {
 public:
  using iterator_category = std::output_iterator_tag;
  using value_type = T;
  using pointer = void;
  using reference = void;
  using difference_type = std::ptrdiff_t;

  explicit AllocatedRepeatedPtrFieldBackInsertIterator(
      RepeatedPtrField<T>* const mutable_field)
      : field_(mutable_field) {}
  AllocatedRepeatedPtrFieldBackInsertIterator<T>& operator=(
      T* const ptr_to_value) {
    field_->AddAllocated(ptr_to_value);
    return *this;
  }
  AllocatedRepeatedPtrFieldBackInsertIterator<T>& operator*() { return *this; }
  AllocatedRepeatedPtrFieldBackInsertIterator<T>& operator++() { return *this; }
  AllocatedRepeatedPtrFieldBackInsertIterator<T>& operator++(int /* unused */) {
    return *this;
  }

 private:
  RepeatedPtrField<T>* field_;
};

// Almost identical to AllocatedRepeatedPtrFieldBackInsertIterator, except that
// it uses UnsafeArenaAddAllocated instead of AddAllocated.
template <typename T>
class UnsafeArenaAllocatedRepeatedPtrFieldBackInsertIterator {
 public:
  using iterator_category = std::output_iterator_tag;
  using value_type = T;
  using pointer = void;
  using reference = void;
  using difference_type = std::ptrdiff_t;

  explicit UnsafeArenaAllocatedRepeatedPtrFieldBackInsertIterator(
      RepeatedPtrField<T>* const mutable_field)
      : field_(mutable_field) {}
  UnsafeArenaAllocatedRepeatedPtrFieldBackInsertIterator<T>& operator=(
      T const* const ptr_to_value) {
    field_->UnsafeArenaAddAllocated(const_cast<T*>(ptr_to_value));
    return *this;
  }
  UnsafeArenaAllocatedRepeatedPtrFieldBackInsertIterator<T>& operator*() {
    return *this;
  }
  UnsafeArenaAllocatedRepeatedPtrFieldBackInsertIterator<T>& operator++() {
    return *this;
  }
  UnsafeArenaAllocatedRepeatedPtrFieldBackInsertIterator<T>& operator++(
      int /* unused */) {
    return *this;
  }

 private:
  RepeatedPtrField<T>* field_;
};

}  // namespace internal

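// Example (editor's sketch; `MyMessage`, `msg`, and MakeMessages() are assumed
// for illustration): the helper functions declared below are the intended way
// to construct these inserters. The plain back inserter copies values into the
// field, while the "allocated" variant donates ownership of heap-allocated
// elements.
//
//   std::vector<std::string> names = {"a", "b"};
//   std::copy(names.begin(), names.end(),
//             RepeatedFieldBackInserter(msg.mutable_name()));  // copies
//
//   std::vector<MyMessage*> owned = MakeMessages();            // new'd objects
//   std::copy(owned.begin(), owned.end(),
//             AllocatedRepeatedPtrFieldBackInserter(msg.mutable_items()));
//   owned.clear();  // msg now owns the pointers; do not delete them.
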
// Provides a back insert iterator for RepeatedPtrField instances,
// similar to std::back_inserter().
template <typename T>
internal::RepeatedPtrFieldBackInsertIterator<T> RepeatedPtrFieldBackInserter(
    RepeatedPtrField<T>* const mutable_field) {
  return internal::RepeatedPtrFieldBackInsertIterator<T>(mutable_field);
}

// Special back insert iterator for RepeatedPtrField instances, just in
// case someone wants to write generic template code that can access both
// RepeatedFields and RepeatedPtrFields using a common name.
template <typename T>
internal::RepeatedPtrFieldBackInsertIterator<T> RepeatedFieldBackInserter(
    RepeatedPtrField<T>* const mutable_field) {
  return internal::RepeatedPtrFieldBackInsertIterator<T>(mutable_field);
}

// Provides a back insert iterator for RepeatedPtrField instances
// similar to std::back_inserter(), except that it transfers ownership of the
// pointed-to elements to the field (via AddAllocated()) instead of copying
// them.
template <typename T>
internal::AllocatedRepeatedPtrFieldBackInsertIterator<T>
AllocatedRepeatedPtrFieldBackInserter(
    RepeatedPtrField<T>* const mutable_field) {
  return internal::AllocatedRepeatedPtrFieldBackInsertIterator<T>(
      mutable_field);
}

// Similar to AllocatedRepeatedPtrFieldBackInserter, but uses
// UnsafeArenaAddAllocated instead of AddAllocated.
// This is slightly faster if that matters. It is also useful in legacy code
// that uses temporary ownership to avoid copies. Example:
//   RepeatedPtrField<T> temp_field;
//   temp_field.UnsafeArenaAddAllocated(new T);
//   ... // Do something with temp_field
//   temp_field.UnsafeArenaExtractSubrange(0, temp_field.size(), nullptr);
// Putting temp_field on the arena fails because ownership transfers to the
// arena at the "AddAllocated" call and is never released again, causing a
// double delete. This function uses UnsafeArenaAddAllocated to prevent this.
template <typename T>
internal::UnsafeArenaAllocatedRepeatedPtrFieldBackInsertIterator<T>
UnsafeArenaAllocatedRepeatedPtrFieldBackInserter(
    RepeatedPtrField<T>* const mutable_field) {
  return internal::UnsafeArenaAllocatedRepeatedPtrFieldBackInsertIterator<T>(
      mutable_field);
}

extern template class PROTOBUF_EXPORT_TEMPLATE_DECLARE
    RepeatedPtrField<std::string>;

}  // namespace protobuf
}  // namespace google

#include <google/protobuf/port_undef.inc>

#endif  // GOOGLE_PROTOBUF_REPEATED_PTR_FIELD_H__