// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc.  All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd

// This file defines an Arena allocator for better allocation performance.

#ifndef GOOGLE_PROTOBUF_ARENA_H__
#define GOOGLE_PROTOBUF_ARENA_H__

#include <cstddef>
#include <cstdint>
#include <limits>
#include <new>  // IWYU pragma: keep for operator new().
#include <string>
#include <type_traits>
#include <utility>
#include <vector>
#if defined(_MSC_VER) && !defined(_LIBCPP_STD_VER) && !_HAS_EXCEPTIONS
// Work around bugs in MSVC <typeinfo> header when _HAS_EXCEPTIONS=0.
#include <exception>
#include <typeinfo>
namespace std {
using type_info = ::type_info;
}
#endif

#include "absl/base/attributes.h"
#include "absl/base/macros.h"
#include "absl/base/optimization.h"
#include "absl/base/prefetch.h"
#include "absl/log/absl_check.h"
#include "absl/utility/internal/if_constexpr.h"
#include "google/protobuf/arena_align.h"
#include "google/protobuf/arena_allocation_policy.h"
#include "google/protobuf/port.h"
#include "google/protobuf/serial_arena.h"
#include "google/protobuf/thread_safe_arena.h"

// Must be included last.
#include "google/protobuf/port_def.inc"

#ifdef SWIG
#error "You cannot SWIG proto headers"
#endif

namespace google {
namespace protobuf {

struct ArenaOptions;  // defined below
class Arena;    // defined below
class Message;  // defined in message.h
class MessageLite;
template <typename Key, typename T>
class Map;
namespace internal {
struct RepeatedFieldBase;
class ExtensionSet;
}  // namespace internal

namespace arena_metrics {

void EnableArenaMetrics(ArenaOptions* options);

}  // namespace arena_metrics

namespace TestUtil {
class ReflectionTester;  // defined in test_util.h
}  // namespace TestUtil

namespace internal {

struct ArenaTestPeer;        // defined in arena_test_util.h
class InternalMetadata;      // defined in metadata_lite.h
class LazyField;             // defined in lazy_field.h
class EpsCopyInputStream;    // defined in parse_context.h
class UntypedMapBase;        // defined in map.h
class RepeatedPtrFieldBase;  // defined in repeated_ptr_field.h
class TcParser;              // defined in generated_message_tctable_impl.h

template <typename Type>
class GenericTypeHandler;  // defined in repeated_field.h

template <typename T>
void arena_delete_object(void* object) {
  delete reinterpret_cast<T*>(object);
}

inline bool CanUseInternalSwap(Arena* lhs, Arena* rhs) {
  if (DebugHardenForceCopyInSwap()) {
    // We force copy in swap when we are not using an arena. If we did so with
    // an arena, we would grow arena usage too much.
    return lhs != nullptr && lhs == rhs;
  } else {
    return lhs == rhs;
  }
}

inline bool CanMoveWithInternalSwap(Arena* lhs, Arena* rhs) {
  if (DebugHardenForceCopyInMove()) {
    // We force copy in move when we are not using an arena. If we did so with
    // an arena, we would grow arena usage too much.
    return lhs != nullptr && lhs == rhs;
  } else {
    return lhs == rhs;
  }
}

}  // namespace internal

// ArenaOptions provides optional additional parameters to arena construction
// that control its block-allocation behavior.
struct ArenaOptions {
  // This defines the size of the first block requested from the system malloc.
  // Subsequent block sizes will increase in a geometric series up to a maximum.
  size_t start_block_size = internal::AllocationPolicy::kDefaultStartBlockSize;

  // This defines the maximum block size requested from system malloc (unless an
  // individual arena allocation request occurs with a size larger than this
  // maximum). Requested block sizes increase up to this value, then remain
  // here.
  size_t max_block_size = internal::AllocationPolicy::kDefaultMaxBlockSize;

  // An initial block of memory for the arena to use, or nullptr for none. If
  // provided, the block must live at least as long as the arena itself. The
  // creator of the Arena retains ownership of the block after the Arena is
  // destroyed.
  char* initial_block = nullptr;

  // The size of the initial block, if provided.
  size_t initial_block_size = 0;

  // A function pointer to an alloc method that returns memory blocks of the
  // requested size. By default, it contains a ptr to the malloc function.
  //
  // NOTE: block_alloc and dealloc functions are expected to behave like
  // malloc and free, including Asan poisoning.
  void* (*block_alloc)(size_t) = nullptr;
  // A function pointer to a dealloc method that takes ownership of the blocks
  // from the arena. By default, it contains a ptr to a wrapper function that
  // calls free.
  void (*block_dealloc)(void*, size_t) = nullptr;

 private:
  internal::AllocationPolicy AllocationPolicy() const {
    internal::AllocationPolicy res;
    res.start_block_size = start_block_size;
    res.max_block_size = max_block_size;
    res.block_alloc = block_alloc;
    res.block_dealloc = block_dealloc;
    return res;
  }

  friend class Arena;
  friend class ArenaOptionsTestFriend;
};
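
// For illustration, a minimal sketch of constructing an arena with custom
// options (the block sizes below are arbitrary example values):
//
//   google::protobuf::ArenaOptions options;
//   options.start_block_size = 1024;
//   options.max_block_size = 64 * 1024;
//   google::protobuf::Arena arena(options);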

// Arena allocator. Arena allocation replaces ordinary (heap-based) allocation
// with new/delete, and improves performance by aggregating allocations into
// larger blocks and freeing allocations all at once. Protocol messages are
// allocated on an arena by using Arena::Create<T>(Arena*), below, and are
// automatically freed when the arena is destroyed.
//
// This is a thread-safe implementation: multiple threads may allocate from the
// arena concurrently. Destruction is not thread-safe and the destructing
// thread must synchronize with users of the arena first.
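//
// A minimal usage sketch, where MyMessage stands in for any generated message
// type:
//
//   google::protobuf::Arena arena;
//   MyMessage* msg = google::protobuf::Arena::Create<MyMessage>(&arena);
//   // Use msg; it is freed when `arena` goes out of scope. Do not delete it.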
class PROTOBUF_EXPORT PROTOBUF_ALIGNAS(8) Arena final {
 public:
  // Default constructor with sensible default options, tuned for average
  // use-cases.
  inline Arena() : impl_() {}

  // Construct an arena with default options, except for the supplied
  // initial block. It is more efficient to use this constructor
  // instead of passing ArenaOptions if the only configuration needed
  // by the caller is supplying an initial block.
  inline Arena(char* initial_block, size_t initial_block_size)
      : impl_(initial_block, initial_block_size) {}

  // Arena constructor taking custom options. See ArenaOptions above for
  // descriptions of the options available.
  explicit Arena(const ArenaOptions& options)
      : impl_(options.initial_block, options.initial_block_size,
              options.AllocationPolicy()) {}

  // Block overhead.  Use this as a guide for how much to over-allocate the
  // initial block if you want an allocation of size N to fit inside it.
  //
  // WARNING: if you allocate multiple objects, it is difficult to guarantee
  // that a series of allocations will fit in the initial block, especially if
  // Arena changes its alignment guarantees in the future!
  static const size_t kBlockOverhead =
      internal::ThreadSafeArena::kBlockHeaderSize +
      internal::ThreadSafeArena::kSerialArenaSize;
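
  // For example, a stack-backed initial block intended to hold roughly 1024
  // bytes of payload might be sized like this (illustrative only; see the
  // WARNING above):
  //
  //   alignas(8) char buf[1024 + google::protobuf::Arena::kBlockOverhead];
  //   google::protobuf::Arena arena(buf, sizeof(buf));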

  inline ~Arena() = default;

#ifndef PROTOBUF_FUTURE_REMOVE_CREATEMESSAGE
  // Deprecated. Use Create<T> instead.
  template <typename T, typename... Args>
  ABSL_DEPRECATED("Use Create")
  static T* CreateMessage(Arena* arena, Args&&... args) {
    using Type = std::remove_const_t<T>;
    static_assert(
        is_arena_constructable<Type>::value,
        "CreateMessage can only construct types that are ArenaConstructable");
    return Create<Type>(arena, std::forward<Args>(args)...);
  }
#endif  // !PROTOBUF_FUTURE_REMOVE_CREATEMESSAGE

  // Allocates an object of type T if the arena passed in is not nullptr;
  // otherwise, returns a heap-allocated object.
  template <typename T, typename... Args>
  PROTOBUF_NDEBUG_INLINE static T* Create(Arena* arena, Args&&... args) {
    return absl::utility_internal::IfConstexprElse<
        is_arena_constructable<T>::value>(
        // Arena-constructable
        [arena](auto&&... args) {
          using Type = std::remove_const_t<T>;
#ifdef __cpp_if_constexpr
          // DefaultConstruct/CopyConstruct are optimized for messages, which
          // are both arena constructible and destructor skippable, and they
          // assume as much. Don't use these functions unless the invariants
          // hold.
          if constexpr (is_destructor_skippable<T>::value) {
            constexpr auto construct_type = GetConstructType<T, Args&&...>();
            // We delegate to DefaultConstruct/CopyConstruct where appropriate
            // because protobuf generated classes have external templates for
            // these functions for code size reasons. When `if constexpr` is not
            // available, always use the fallback.
            if constexpr (construct_type == ConstructType::kDefault) {
              return static_cast<Type*>(DefaultConstruct<Type>(arena));
            } else if constexpr (construct_type == ConstructType::kCopy) {
              return static_cast<Type*>(CopyConstruct<Type>(arena, &args...));
            }
          }
#endif
          return CreateArenaCompatible<Type>(arena,
                                             std::forward<Args>(args)...);
        },
        // Non arena-constructable
        [arena](auto&&... args) {
          if (PROTOBUF_PREDICT_FALSE(arena == nullptr)) {
            return new T(std::forward<Args>(args)...);
          }
          return new (arena->AllocateInternal<T>())
              T(std::forward<Args>(args)...);
        },
        std::forward<Args>(args)...);
  }
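
  // Create<T> also accepts arbitrary C++ types, not just messages. A small
  // sketch:
  //
  //   google::protobuf::Arena arena;
  //   auto* s = google::protobuf::Arena::Create<std::string>(&arena, "hi");
  //   // `s` lives on the arena; its destructor is run when the arena is
  //   // reset or destroyed. Passing a null arena would heap-allocate instead.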

  // API to delete any objects not on an arena.  This can be used to safely
  // clean up messages or repeated fields without knowing whether or not they're
  // owned by an arena.  The pointer passed to this function should not be used
  // again.
  template <typename T>
  PROTOBUF_ALWAYS_INLINE static void Destroy(T* obj) {
    if (InternalGetArena(obj) == nullptr) delete obj;
  }
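
  // Illustrative use, where `maybe_arena` is a hypothetical Arena* that may be
  // null:
  //
  //   MyMessage* m = google::protobuf::Arena::Create<MyMessage>(maybe_arena);
  //   ...
  //   google::protobuf::Arena::Destroy(m);  // deletes only if heap-allocated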

  // Allocates memory with the specified size and alignment.
  void* AllocateAligned(size_t size, size_t align = 8) {
    if (align <= internal::ArenaAlignDefault::align) {
      return Allocate(internal::ArenaAlignDefault::Ceil(size));
    } else {
      // We waste space by over-allocating align - 8 bytes, compared to a
      // dedicated function that takes the current alignment into
      // consideration. Such a scheme would only waste (align - 8)/2 bytes on
      // average, but requires a dedicated function in the out-of-line arena
      // allocation functions. Possibly re-evaluate the tradeoffs later.
      auto align_as = internal::ArenaAlignAs(align);
      return align_as.Ceil(Allocate(align_as.Padded(size)));
    }
  }

  // Create an array of objects of type T on the arena *without* invoking the
  // constructor of T. If `arena` is null, then the return value should be freed
  // with `delete[] x;` (or `::operator delete[](x);`).
  // To ensure safe uses, this function checks at compile time
  // (when compiled as C++11) that T is trivially default-constructible and
  // trivially destructible.
  template <typename T>
  PROTOBUF_NDEBUG_INLINE static T* CreateArray(Arena* arena,
                                               size_t num_elements) {
    static_assert(std::is_trivial<T>::value,
                  "CreateArray requires a trivially constructible type");
    static_assert(std::is_trivially_destructible<T>::value,
                  "CreateArray requires a trivially destructible type");
    ABSL_CHECK_LE(num_elements, std::numeric_limits<size_t>::max() / sizeof(T))
        << "Requested size is too large to fit into size_t.";
    if (PROTOBUF_PREDICT_FALSE(arena == nullptr)) {
      return new T[num_elements];
    } else {
      // We count on the compiler to realize that if sizeof(T) is a multiple of
      // 8, AlignUpTo can be elided.
      return static_cast<T*>(
          arena->AllocateAlignedForArray(sizeof(T) * num_elements, alignof(T)));
    }
  }
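
  // A sketch of allocating an uninitialized scratch buffer on an arena:
  //
  //   int32_t* buf = google::protobuf::Arena::CreateArray<int32_t>(&arena, 16);
  //   // Elements are NOT value-initialized; write them before reading.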

  // The following routines are for monitoring. They approximate the total
  // memory allocated and used, but the exact values are an implementation
  // detail. For instance, allocated space depends on growth policies. Do not
  // use these in unit tests. Returns the total space allocated by the arena,
  // which is the sum of the sizes of the underlying blocks.
  uint64_t SpaceAllocated() const { return impl_.SpaceAllocated(); }
  // Returns the total space used by the arena. Similar to SpaceAllocated but
  // does not include free space and block overhead.  This is a best-effort
  // estimate and may inaccurately calculate space used by other threads
  // executing concurrently with the call to this method.  These inaccuracies
  // are due to race conditions, and are bounded but unpredictable.  Stale data
  // can lead to underestimates of the space used, and race conditions can lead
  // to overestimates (up to the current block size).
  uint64_t SpaceUsed() const { return impl_.SpaceUsed(); }

  // Frees all storage allocated by this arena after calling destructors
  // registered with OwnDestructor() and freeing objects registered with Own().
  // Any objects allocated on this arena are unusable after this call. It also
  // returns the total space used by the arena, which is the sum of the sizes
  // of the allocated blocks. This method is not thread-safe.
  uint64_t Reset() { return impl_.Reset(); }
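
  // For illustration (the exact values are implementation-dependent):
  //
  //   google::protobuf::Arena arena;
  //   google::protobuf::Arena::Create<std::string>(&arena, "x");
  //   uint64_t allocated = arena.SpaceAllocated();  // >= arena.SpaceUsed()
  //   arena.Reset();  // destroys/frees everything; the arena can be reused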

  // Adds |object| to a list of heap-allocated objects to be freed with |delete|
  // when the arena is destroyed or reset.
  template <typename T>
  PROTOBUF_ALWAYS_INLINE void Own(T* object) {
    // Collapsing all template instantiations to one for generic Message types
    // reduces code size, using the virtual destructor instead.
    using TypeToUse =
        std::conditional_t<std::is_convertible<T*, MessageLite*>::value,
                           MessageLite, T>;
    if (object != nullptr) {
      impl_.AddCleanup(static_cast<TypeToUse*>(object),
                       &internal::arena_delete_object<TypeToUse>);
    }
  }
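
  // Illustrative use, transferring ownership of a heap object to the arena
  // (MyStruct is a stand-in type):
  //
  //   auto* obj = new MyStruct;
  //   arena.Own(obj);  // `delete obj` runs when the arena is destroyed/reset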

  // Adds |object| to a list of objects whose destructors will be manually
  // called when the arena is destroyed or reset. This differs from Own() in
  // that it does not free the underlying memory with |delete|; hence, it is
  // normally only used for objects that are placement-newed into
  // arena-allocated memory.
  template <typename T>
  PROTOBUF_ALWAYS_INLINE void OwnDestructor(T* object) {
    if (object != nullptr) {
      impl_.AddCleanup(object, &internal::cleanup::arena_destruct_object<T>);
    }
  }
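
  // Illustrative use with placement new into arena memory (MyStruct is a
  // stand-in type):
  //
  //   void* mem = arena.AllocateAligned(sizeof(MyStruct), alignof(MyStruct));
  //   MyStruct* obj = new (mem) MyStruct;
  //   arena.OwnDestructor(obj);  // ~MyStruct() runs at destruction/reset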

  // Adds a custom member function on an object to the list of destructors that
  // will be manually called when the arena is destroyed or reset. This differs
  // from OwnDestructor() in that any member function may be specified, not only
  // the class destructor.
  PROTOBUF_ALWAYS_INLINE void OwnCustomDestructor(void* object,
                                                  void (*destruct)(void*)) {
    impl_.AddCleanup(object, destruct);
  }

  // Retrieves the arena associated with |value| if |value| is an arena-capable
  // message, or nullptr otherwise. If possible, the call resolves at compile
  // time. Note that we can often devirtualize calls to `value->GetArena()` so
  // usually calling this method is unnecessary.
  // TODO: remove this function.
  template <typename T>
  ABSL_DEPRECATED(
      "This will be removed in a future release. Call value->GetArena() "
      "instead.")
  PROTOBUF_ALWAYS_INLINE static Arena* GetArena(T* value) {
    return GetArenaInternal(value);
  }

  template <typename T>
  class InternalHelper {
   private:
    // An SFINAE-friendly trait that probes for `U` but always evaluates to
    // `Arena*`.
    template <typename U>
    using EnableIfArena =
        typename std::enable_if<std::is_same<Arena*, U>::value, Arena*>::type;

    // Use go/ranked-overloads for dispatching.
    struct Rank0 {};
    struct Rank1 : Rank0 {};

    static void InternalSwap(T* a, T* b) { a->InternalSwap(b); }

    static Arena* GetArena(T* p) { return GetArena(Rank1{}, p); }

    template <typename U>
    static auto GetArena(Rank1,
                         U* p) -> EnableIfArena<decltype(p->GetArena())> {
      return p->GetArena();
    }

    template <typename U>
    static Arena* GetArena(Rank0, U*) {
      return nullptr;
    }

    // If an object type T satisfies the appropriate protocol, it is deemed
    // "arena compatible" and handled more efficiently because this interface
    // (i) passes the arena pointer to the created object so that its
    // sub-objects and internal allocations can use the arena too, and (ii)
    // elides the object's destructor call when possible; e.g. protobuf
    // messages, RepeatedField, etc. Otherwise, the arena will invoke the
    // object's destructor when the arena is destroyed.
    //
    // To be "arena-compatible", a type T must satisfy the following:
    //
    // - The type T must have (at least) two constructors: a constructor
    //   callable with `args` (without `arena`), called when a T is allocated on
    //   the heap; and a constructor callable with `Arena* arena, Args&&...
    //   args`, called when a T is allocated on an arena. If the second
    //   constructor is called with a null arena pointer, it must be equivalent
    //   to invoking the first (`args`-only) constructor.
    //
    // - The type T must have a particular type trait: a nested type
    //   |InternalArenaConstructable_|. This is usually a typedef to |void|.
    //
    // - The type T *may* have the type trait |DestructorSkippable_|. If this
    //   type trait is present in the type, then its destructor will not be
    //   called if and only if it was passed a non-null arena pointer. If this
    //   type trait is not present on the type, then its destructor is always
    //   called when the containing arena is destroyed.
    //
    // The protocol is implemented by all protobuf message classes as well as
    // protobuf container types like RepeatedPtrField and Map. It is internal to
    // protobuf and is not guaranteed to be stable. Non-proto types should not
    // rely on this protocol.
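    //
    // For illustration only, a sketch of a type that satisfies the protocol
    // (non-proto code should not actually do this, per the note above):
    //
    //   class MyArenaType {
    //    public:
    //     using InternalArenaConstructable_ = void;
    //     using DestructorSkippable_ = void;
    //     MyArenaType() {}                                   // heap path
    //     explicit MyArenaType(google::protobuf::Arena*) {}  // arena path
    //   };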
    template <typename U>
    static char DestructorSkippable(const typename U::DestructorSkippable_*);
    template <typename U>
    static double DestructorSkippable(...);

    typedef std::integral_constant<
        bool, sizeof(DestructorSkippable<T>(static_cast<const T*>(0))) ==
                      sizeof(char) ||
                  std::is_trivially_destructible<T>::value>
        is_destructor_skippable;

    template <typename U>
    static char ArenaConstructable(
        const typename U::InternalArenaConstructable_*);
    template <typename U>
    static double ArenaConstructable(...);

    typedef std::integral_constant<bool, sizeof(ArenaConstructable<T>(
                                             static_cast<const T*>(0))) ==
                                             sizeof(char)>
        is_arena_constructable;

    template <typename... Args>
    static T* Construct(void* ptr, Args&&... args) {
      return new (ptr) T(static_cast<Args&&>(args)...);
    }

    static inline PROTOBUF_ALWAYS_INLINE T* New() {
      return new T(nullptr);
    }

    friend class Arena;
    friend class TestUtil::ReflectionTester;
  };

  // Provides access to protected GetArena to generated messages.
  // For internal use only.
  template <typename T>
  static Arena* InternalGetArena(T* p) {
    return InternalHelper<T>::GetArena(p);
  }

  // Helper type traits that indicate support for arenas in a type T at compile
  // time. This is public only to allow construction of higher-level templated
  // utilities.
  //
  // is_arena_constructable<T>::value is true if the message type T has arena
  // support enabled, and false otherwise.
  //
  // is_destructor_skippable<T>::value is true if the message type T has told
  // the arena that it is safe to skip the destructor, and false otherwise.
  //
  // This is inside Arena because only Arena has the friend relationships
  // necessary to see the underlying generated code traits.
  template <typename T>
  struct is_arena_constructable : InternalHelper<T>::is_arena_constructable {};
  template <typename T>
  struct is_destructor_skippable : InternalHelper<T>::is_destructor_skippable {
  };
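
  // For example, generated message types report true for both traits
  // (MyMessage is a stand-in for a generated type):
  //
  //   static_assert(
  //       google::protobuf::Arena::is_arena_constructable<MyMessage>::value,
  //       "generated messages are arena constructable");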

 private:
  internal::ThreadSafeArena impl_;

  enum class ConstructType { kUnknown, kDefault, kCopy, kMove };
  // Overload set to detect which kind of construction is going to happen for a
  // specific set of input arguments. This is used to dispatch to different
  // helper functions.
  template <typename T>
  static auto ProbeConstructType()
      -> std::integral_constant<ConstructType, ConstructType::kDefault>;
  template <typename T>
  static auto ProbeConstructType(const T&)
      -> std::integral_constant<ConstructType, ConstructType::kCopy>;
  template <typename T>
  static auto ProbeConstructType(T&)
      -> std::integral_constant<ConstructType, ConstructType::kCopy>;
  template <typename T>
  static auto ProbeConstructType(const T&&)
      -> std::integral_constant<ConstructType, ConstructType::kCopy>;
  template <typename T>
  static auto ProbeConstructType(T&&)
      -> std::integral_constant<ConstructType, ConstructType::kMove>;
  template <typename T, typename... U>
  static auto ProbeConstructType(U&&...)
      -> std::integral_constant<ConstructType, ConstructType::kUnknown>;

  template <typename T, typename... Args>
  static constexpr auto GetConstructType() {
    return std::is_base_of<MessageLite, T>::value
               ? decltype(ProbeConstructType<T>(std::declval<Args>()...))::value
               : ConstructType::kUnknown;
  }

  void ReturnArrayMemory(void* p, size_t size) {
    impl_.ReturnArrayMemory(p, size);
  }

  template <typename T, typename... Args>
  PROTOBUF_NDEBUG_INLINE static T* CreateArenaCompatible(Arena* arena,
                                                         Args&&... args) {
    static_assert(is_arena_constructable<T>::value,
                  "Can only construct types that are ArenaConstructable");
    if (PROTOBUF_PREDICT_FALSE(arena == nullptr)) {
      return new T(nullptr, static_cast<Args&&>(args)...);
    } else {
      return arena->DoCreateMessage<T>(static_cast<Args&&>(args)...);
    }
  }

  // This specialization for no arguments is necessary, because its behavior is
  // slightly different.  When the arena pointer is nullptr, it calls T(nullptr)
  // via InternalHelper, since the generated arena constructor T(Arena*) is
  // protected.
  template <typename T>
  PROTOBUF_NDEBUG_INLINE static T* CreateArenaCompatible(Arena* arena) {
    static_assert(is_arena_constructable<T>::value,
                  "Can only construct types that are ArenaConstructable");
    if (PROTOBUF_PREDICT_FALSE(arena == nullptr)) {
      // Generated arena constructor T(Arena*) is protected. Call via
      // InternalHelper.
      return InternalHelper<T>::New();
    } else {
      return arena->DoCreateMessage<T>();
    }
  }

  template <typename T, bool trivial = std::is_trivially_destructible<T>::value>
  PROTOBUF_NDEBUG_INLINE void* AllocateInternal() {
    if (trivial) {
      return AllocateAligned(sizeof(T), alignof(T));
    } else {
      // We avoid instantiating arena_destruct_object<T> in the trivial case.
      constexpr auto dtor = &internal::cleanup::arena_destruct_object<
          std::conditional_t<trivial, std::string, T>>;
      return AllocateAlignedWithCleanup(sizeof(T), alignof(T), dtor);
    }
  }

  // DefaultConstruct/CopyConstruct:
  //
  // Functions with a generic signature to support taking the address in generic
  // contexts, like RepeatedPtrField, etc.
  // These are also used as a hook for `extern template` instantiations where
  // codegen can offload the instantiations to the respective .pb.cc files. This
  // has two benefits:
  //  - It reduces the library bloat as callers don't have to instantiate the
  //  function.
  //  - It allows the optimizer to see the constructors called to
  //  further optimize the instantiation.
  template <typename T>
  static void* DefaultConstruct(Arena* arena);
  template <typename T>
  static void* CopyConstruct(Arena* arena, const void* from);

  template <typename T, typename... Args>
  PROTOBUF_NDEBUG_INLINE T* DoCreateMessage(Args&&... args) {
    return InternalHelper<T>::Construct(
        AllocateInternal<T, is_destructor_skippable<T>::value>(), this,
        std::forward<Args>(args)...);
  }

  // CreateInArenaStorage is used to implement map fields. Without it, Map
  // would need to call the generated message's protected arena constructor,
  // which would require declaring Map as a friend of the generated message.
  template <typename T, typename... Args>
  static void CreateInArenaStorage(T* ptr, Arena* arena, Args&&... args) {
    CreateInArenaStorageInternal(ptr, arena, is_arena_constructable<T>(),
                                 std::forward<Args>(args)...);
    if (PROTOBUF_PREDICT_TRUE(arena != nullptr)) {
      RegisterDestructorInternal(ptr, arena, is_destructor_skippable<T>());
    }
  }

  template <typename T, typename... Args>
  static void CreateInArenaStorageInternal(T* ptr, Arena* arena,
                                           std::true_type, Args&&... args) {
    InternalHelper<T>::Construct(ptr, arena, std::forward<Args>(args)...);
  }
  template <typename T, typename... Args>
  static void CreateInArenaStorageInternal(T* ptr, Arena* /* arena */,
                                           std::false_type, Args&&... args) {
    new (ptr) T(std::forward<Args>(args)...);
  }

  template <typename T>
  static void RegisterDestructorInternal(T* /* ptr */, Arena* /* arena */,
                                         std::true_type) {}
  template <typename T>
  static void RegisterDestructorInternal(T* ptr, Arena* arena,
                                         std::false_type) {
    arena->OwnDestructor(ptr);
  }

  // Implementation for GetArena(). Only message objects with
  // InternalArenaConstructable_ tags can be associated with an arena, and such
  // objects must implement a GetArena() method.
  template <typename T>
  PROTOBUF_ALWAYS_INLINE static Arena* GetArenaInternal(T* value) {
    return InternalHelper<T>::GetArena(value);
  }

  void* AllocateAlignedForArray(size_t n, size_t align) {
    if (align <= internal::ArenaAlignDefault::align) {
      return AllocateForArray(internal::ArenaAlignDefault::Ceil(n));
    } else {
      // We waste space by over-allocating align - 8 bytes, compared to a
      // dedicated function that takes the current alignment into
      // consideration. Such a scheme would only waste (align - 8)/2 bytes on
      // average, but requires a dedicated function in the out-of-line arena
      // allocation functions. Possibly re-evaluate the tradeoffs later.
      auto align_as = internal::ArenaAlignAs(align);
      return align_as.Ceil(AllocateForArray(align_as.Padded(n)));
    }
  }

  void* Allocate(size_t n);
  void* AllocateForArray(size_t n);
  void* AllocateAlignedWithCleanup(size_t n, size_t align,
                                   void (*destructor)(void*));

  // Test-only API.
  // It returns the objects that are in the cleanup list for the current
  // SerialArena. This API is meant for tests that want to see whether
  // something was added to the cleanup list. Sometimes adding something to the
  // cleanup list has no visible side effect, so peeking into the list is the
  // only way to test.
  std::vector<void*> PeekCleanupListForTesting();

  template <typename Type>
  friend class internal::GenericTypeHandler;
  friend class internal::InternalMetadata;  // For user_arena().
  friend class internal::LazyField;         // For DefaultConstruct.
  friend class internal::EpsCopyInputStream;  // For parser performance
  friend class internal::TcParser;            // For parser performance
  friend class MessageLite;
  template <typename Key, typename T>
  friend class Map;
  template <typename>
  friend class RepeatedField;                   // For ReturnArrayMemory
  friend class internal::RepeatedPtrFieldBase;  // For ReturnArrayMemory
  friend class internal::UntypedMapBase;        // For ReturnArrayMemory
  friend class internal::ExtensionSet;          // For ReturnArrayMemory

  friend struct internal::ArenaTestPeer;
};

// DefaultConstruct/CopyConstruct
//
// IMPORTANT: These have to be defined out of line and without an `inline`
// keyword to make sure the `extern template` suppresses instantiations.
template <typename T>
PROTOBUF_NOINLINE void* Arena::DefaultConstruct(Arena* arena) {
  static_assert(is_destructor_skippable<T>::value, "");
  void* mem = arena != nullptr ? arena->AllocateAligned(sizeof(T))
                               : ::operator new(sizeof(T));
  return new (mem) T(arena);
}

template <typename T>
PROTOBUF_NOINLINE void* Arena::CopyConstruct(Arena* arena, const void* from) {
  // If the object is larger than half a cache line, prefetch it.
  // This way of prefetching is a little more aggressive than if we
  // condition off a whole cache line, but benchmarks show better results.
  if (sizeof(T) > ABSL_CACHELINE_SIZE / 2) {
    PROTOBUF_PREFETCH_WITH_OFFSET(from, 64);
  }
  static_assert(is_destructor_skippable<T>::value, "");
  void* mem = arena != nullptr ? arena->AllocateAligned(sizeof(T))
                               : ::operator new(sizeof(T));
  return new (mem) T(arena, *static_cast<const T*>(from));
}

template <>
inline void* Arena::AllocateInternal<std::string, false>() {
  return impl_.AllocateFromStringBlock();
}

}  // namespace protobuf
}  // namespace google

#include "google/protobuf/port_undef.inc"

#endif  // GOOGLE_PROTOBUF_ARENA_H__