//
//
// Copyright 2017 gRPC authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
//

// \file Arena based allocator
// Allows very fast allocation of memory, but that memory cannot be freed
// until the arena as a whole is freed.
// Tracks the total memory allocated against it, so that future arenas can
// pre-allocate the right amount of memory.

#ifndef GRPC_SRC_CORE_LIB_RESOURCE_QUOTA_ARENA_H
#define GRPC_SRC_CORE_LIB_RESOURCE_QUOTA_ARENA_H

#include <grpc/event_engine/memory_allocator.h>
#include <grpc/support/port_platform.h>
#include <stddef.h>
#include <stdint.h>

#include <atomic>
#include <iosfwd>
#include <memory>
#include <new>
#include <utility>
#include <vector>

#include "absl/log/check.h"
#include "absl/meta/type_traits.h"
#include "src/core/lib/promise/context.h"
#include "src/core/lib/resource_quota/memory_quota.h"
#include "src/core/util/alloc.h"
#include "src/core/util/construct_destruct.h"
#include "src/core/util/no_destruct.h"
#include "src/core/util/ref_counted.h"
#include "src/core/util/ref_counted_ptr.h"

namespace grpc_core {

class Arena;

template <typename T>
struct ArenaContextType;

namespace arena_detail {

// Tracks all registered arena context types (these should only be registered
// via ArenaContextTraits at static initialization time).
class BaseArenaContextTraits {
 public:
  // Count of the number of context types that have been registered.
  static uint16_t NumContexts() {
    return static_cast<uint16_t>(RegisteredTraits().size());
  }

  // Number of bytes required to store the context pointers on an arena.
  static size_t ContextSize() { return NumContexts() * sizeof(void*); }

  // Call the registered destruction function for a context.
  static void Destroy(uint16_t id, void* ptr) {
    if (ptr == nullptr) return;
    RegisteredTraits()[id](ptr);
  }

 protected:
  // Allocate a new context id and register the destruction function.
  static uint16_t MakeId(void (*destroy)(void* ptr)) {
    auto& traits = RegisteredTraits();
    const uint16_t id = static_cast<uint16_t>(traits.size());
    traits.push_back(destroy);
    return id;
  }

 private:
  static std::vector<void (*)(void*)>& RegisteredTraits() {
    static NoDestruct<std::vector<void (*)(void*)>> registered_traits;
    return *registered_traits;
  }
};

// Traits for a specific context type.
template <typename T>
class ArenaContextTraits : public BaseArenaContextTraits {
 public:
  GPR_ATTRIBUTE_ALWAYS_INLINE_FUNCTION static uint16_t id() { return id_; }

 private:
  static const uint16_t id_;
};

template <typename T>
GPR_ATTRIBUTE_ALWAYS_INLINE_FUNCTION inline void DestroyArenaContext(void* p) {
  ArenaContextType<T>::Destroy(static_cast<T*>(p));
}

template <typename T>
const uint16_t ArenaContextTraits<T>::id_ =
    BaseArenaContextTraits::MakeId(DestroyArenaContext<T>);

template <typename T, typename A, typename B>
struct IfArray {
  using Result = A;
};

template <typename T, typename A, typename B>
struct IfArray<T[], A, B> {
  using Result = B;
};

struct UnrefDestroy {
  void operator()(const Arena* arena) const;
};

}  // namespace arena_detail


class ArenaFactory : public RefCounted<ArenaFactory> {
 public:
  virtual RefCountedPtr<Arena> MakeArena() = 0;
  virtual void FinalizeArena(Arena* arena) = 0;

  MemoryAllocator& allocator() { return allocator_; }

 protected:
  explicit ArenaFactory(MemoryAllocator allocator)
      : allocator_(std::move(allocator)) {}

 private:
  MemoryAllocator allocator_;
};

MemoryAllocator DefaultMemoryAllocatorForSimpleArenaAllocator();
RefCountedPtr<ArenaFactory> SimpleArenaAllocator(
    size_t initial_size = 1024,
    MemoryAllocator allocator =
        DefaultMemoryAllocatorForSimpleArenaAllocator());
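
// Example usage (a minimal sketch; SimpleArenaAllocator() and MakeArena() are
// declared above, while `DoWork` is a hypothetical caller):
//   RefCountedPtr<Arena> arena = SimpleArenaAllocator()->MakeArena();
//   DoWork(arena.get());
//   // All arena memory is released once the last ref to the arena is dropped.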

class Arena final : public RefCounted<Arena, NonPolymorphicRefCount,
                                      arena_detail::UnrefDestroy> {
 public:
  // Create an arena, with \a initial_size bytes in the first allocated buffer.
  static RefCountedPtr<Arena> Create(size_t initial_size,
                                     RefCountedPtr<ArenaFactory> arena_factory);

  // Destroy all `ManagedNew` allocated objects.
  // Allows safe destruction of these objects even if they need context held by
  // the arena.
  // Idempotent.
  // TODO(ctiller): eliminate ManagedNew.
  void DestroyManagedNewObjects();

  // Return the total number of bytes allocated from this arena so far (i.e.
  // handed out via Alloc(), after rounding up to the arena alignment).
  size_t TotalUsedBytes() const {
    return total_used_.load(std::memory_order_relaxed);
  }

  // Allocate \a size bytes from the arena.
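  //
  // Example (a minimal sketch; there is no per-allocation free, the returned
  // storage stays valid until the arena itself is destroyed):
  //   void* buf = arena->Alloc(10);  // size is rounded up to the arena
  //                                  // alignment internally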
  void* Alloc(size_t size) {
    size = GPR_ROUND_UP_TO_ALIGNMENT_SIZE(size);
    size_t begin = total_used_.fetch_add(size, std::memory_order_relaxed);
    if (begin + size <= initial_zone_size_) {
      return reinterpret_cast<char*>(this) + begin;
    } else {
      return AllocZone(size);
    }
  }

  // Allocates T from the arena.
  // The caller is responsible for calling p->~T(), but should NOT delete.
  // TODO(roth): We currently assume that all callers need alignment of 16
  // bytes, which may be wrong in some cases. When we have time, we should
  // change this to instead use the alignment of the type being allocated by
  // this method.
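  //
  // Example (a minimal sketch; `FooBar` is a hypothetical type):
  //   FooBar* p = arena->New<FooBar>(1, "two");
  //   ...
  //   p->~FooBar();  // run the destructor explicitly; never `delete p`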
  template <typename T, typename... Args>
  T* New(Args&&... args) {
    T* t = static_cast<T*>(Alloc(sizeof(T)));
    new (t) T(std::forward<Args>(args)...);
    return t;
  }

  // Like New, but has the arena call p->~T() at arena destruction time.
  // The caller should NOT delete.
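  //
  // Example (a minimal sketch; `Watcher` stands in for any type that should
  // live as long as the arena):
  //   Watcher* w = arena->ManagedNew<Watcher>();
  //   // No explicit destruction: ~Watcher() runs when the arena destroys its
  //   // managed objects (DestroyManagedNewObjects() or arena destruction).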
  template <typename T, typename... Args>
  T* ManagedNew(Args&&... args) {
    auto* p = New<ManagedNewImpl<T>>(std::forward<Args>(args)...);
    p->Link(&managed_new_head_);
    return &p->t;
  }

  template <typename T, typename... Args>
  absl::enable_if_t<std::is_same<typename T::RefCountedUnrefBehaviorType,
                                 UnrefCallDtor>::value,
                    RefCountedPtr<T>>
  MakeRefCounted(Args&&... args) {
    return RefCountedPtr<T>(New<T>(std::forward<Args>(args)...));
  }

  class PooledDeleter {
   public:
    PooledDeleter() = default;
    explicit PooledDeleter(std::nullptr_t) : delete_(false) {}
    template <typename T>
    void operator()(T* p) {
      // TODO(ctiller): promise based filter hijacks ownership of some pointers
      // to make them appear as PoolPtr without really transferring ownership,
      // by setting the arena to nullptr.
      // This is a transitional hack and should be removed once promise based
      // filter is removed.
      if (delete_) delete p;
    }

    bool has_freelist() const { return delete_; }

   private:
    bool delete_ = true;
  };

  class ArrayPooledDeleter {
   public:
    ArrayPooledDeleter() = default;
    explicit ArrayPooledDeleter(std::nullptr_t) : delete_(false) {}
    template <typename T>
    void operator()(T* p) {
      // TODO(ctiller): promise based filter hijacks ownership of some pointers
      // to make them appear as PoolPtr without really transferring ownership,
      // by setting the arena to nullptr.
      // This is a transitional hack and should be removed once promise based
      // filter is removed.
      if (delete_) delete[] p;
    }

    bool has_freelist() const { return delete_; }

   private:
    bool delete_ = true;
  };

  template <typename T>
  using PoolPtr =
      std::unique_ptr<T, typename arena_detail::IfArray<
                             T, PooledDeleter, ArrayPooledDeleter>::Result>;

  // Make a unique_ptr to T that is allocated from the arena.
  // When the pointer is released, the memory may be reused for other
  // MakePooled(.*) calls.
  // CAUTION: The amount of memory allocated is rounded up to the nearest
  //          value in Arena::PoolSizes, and so this may pessimize total
  //          arena size.
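  //
  // Example (a minimal sketch; `Payload` is a hypothetical type):
  //   Arena::PoolPtr<Payload> p = Arena::MakePooled<Payload>(args...);
  //   // `p` behaves like a std::unique_ptr; its deleter runs when it goes
  //   // out of scope.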
  template <typename T, typename... Args>
  static PoolPtr<T> MakePooled(Args&&... args) {
    return PoolPtr<T>(new T(std::forward<Args>(args)...), PooledDeleter());
  }

  template <typename T>
  static PoolPtr<T> MakePooledForOverwrite() {
    return PoolPtr<T>(new T, PooledDeleter());
  }

  // Make a unique_ptr to an array of T that is allocated from the arena.
  // When the pointer is released, the memory may be reused for other
  // MakePooled(.*) calls.
  // One can use MakePooledArray<char> to allocate a buffer of bytes.
  // CAUTION: The amount of memory allocated is rounded up to the nearest
  //          value in Arena::PoolSizes, and so this may pessimize total
  //          arena size.
  template <typename T>
  PoolPtr<T[]> MakePooledArray(size_t n) {
    return PoolPtr<T[]>(new T[n], ArrayPooledDeleter());
  }

  // Like MakePooled, but with manual memory management.
  // The caller is responsible for calling DeletePooled() on the returned
  // pointer, and is expected to call it with the same type T as was passed to
  // this function (else the free list returned to the arena will be
  // corrupted).
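  //
  // Example (a minimal sketch; `Slot` is a hypothetical type):
  //   Slot* s = arena->NewPooled<Slot>();
  //   ...
  //   arena->DeletePooled(s);  // must be paired with the NewPooled call above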
  template <typename T, typename... Args>
  T* NewPooled(Args&&... args) {
    return new T(std::forward<Args>(args)...);
  }

  template <typename T>
  void DeletePooled(T* p) {
    delete p;
  }

  // Context accessors
  // Prefer to use the free-standing `GetContext<>` and `SetContext<>`
  // functions for modern promise-based code -- however legacy filter stack
  // based code often needs to access these directly.
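  //
  // Example (a minimal sketch): a type participates as an arena context by
  // specializing ArenaContextType<T> with a static Destroy(), which is used
  // to clean up the stored value (SetContext() below invokes it when a value
  // is replaced). `MyCtx` is a hypothetical type:
  //   template <>
  //   struct ArenaContextType<MyCtx> {
  //     static void Destroy(MyCtx* p) { delete p; }
  //   };
  //   ...
  //   arena->SetContext<MyCtx>(new MyCtx());
  //   MyCtx* ctx = arena->GetContext<MyCtx>();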
  template <typename T>
  GPR_ATTRIBUTE_ALWAYS_INLINE_FUNCTION T* GetContext() {
    return static_cast<T*>(
        contexts()[arena_detail::ArenaContextTraits<T>::id()]);
  }

  template <typename T>
  void SetContext(T* context) {
    void*& slot = contexts()[arena_detail::ArenaContextTraits<T>::id()];
    if (slot != nullptr) {
      ArenaContextType<T>::Destroy(static_cast<T*>(slot));
    }
    slot = context;
    DCHECK_EQ(GetContext<T>(), context);
  }

  static size_t ArenaOverhead() {
    return GPR_ROUND_UP_TO_ALIGNMENT_SIZE(sizeof(Arena));
  }
  static size_t ArenaZoneOverhead() {
    return GPR_ROUND_UP_TO_ALIGNMENT_SIZE(sizeof(Zone));
  }

 private:
  friend struct arena_detail::UnrefDestroy;

  struct Zone {
    Zone* prev;
  };

  struct ManagedNewObject {
    ManagedNewObject* next = nullptr;
    void Link(std::atomic<ManagedNewObject*>* head);
    virtual ~ManagedNewObject() = default;
  };

  template <typename T>
  struct ManagedNewImpl : public ManagedNewObject {
    T t;
    template <typename... Args>
    explicit ManagedNewImpl(Args&&... args) : t(std::forward<Args>(args)...) {}
  };

  // Initialize an arena.
  // Parameters:
  //   initial_size: The initial size of the whole arena in bytes. These bytes
  //   are contained within 'zone 0'. If the arena user ends up requiring more
  //   memory than the arena contains in zone 0, subsequent zones are allocated
  //   on demand and maintained in a tail-linked list.
  //
  //   initial_alloc: Optionally, construct the arena as though a call to
  //   Alloc() had already been made for initial_alloc bytes. This provides a
  //   quick optimization (avoiding an atomic fetch-add) for the common case
  //   where we wish to create an arena and then perform an immediate
  //   allocation.
  explicit Arena(size_t initial_size,
                 RefCountedPtr<ArenaFactory> arena_factory);

  ~Arena();

  void* AllocZone(size_t size);
  void Destroy() const;
  GPR_ATTRIBUTE_ALWAYS_INLINE_FUNCTION void** contexts() {
    return reinterpret_cast<void**>(this + 1);
  }

  // Keep track of the total used size. We use this in our call sizing
  // hysteresis.
  const size_t initial_zone_size_;
  std::atomic<size_t> total_used_;
  std::atomic<size_t> total_allocated_{initial_zone_size_};
  // If the initial arena allocation wasn't enough, we allocate additional
  // zones in a reverse linked list. Each additional zone consists of (1) a
  // pointer to the zone added before this zone (null if this is the first
  // additional zone) and (2) the allocated memory. The arena itself maintains
  // a pointer to the last zone; the zone list is reverse-walked during arena
  // destruction only.
  std::atomic<Zone*> last_zone_{nullptr};
  std::atomic<ManagedNewObject*> managed_new_head_{nullptr};
  RefCountedPtr<ArenaFactory> arena_factory_;
};

// Arenas form a context for activities
template <>
struct ContextType<Arena> {};
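
// For promise-based code, the current arena is itself retrievable as a
// context (a minimal sketch; assumes it runs inside an activity whose arena
// context has been set):
//   Arena* arena = GetContext<Arena>();
//   void* scratch = arena->Alloc(64);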

namespace arena_detail {
inline void UnrefDestroy::operator()(const Arena* arena) const {
  arena->Destroy();
}
}  // namespace arena_detail

namespace promise_detail {

template <typename T>
class Context<T, absl::void_t<decltype(ArenaContextType<T>::Destroy)>> {
 public:
  GPR_ATTRIBUTE_ALWAYS_INLINE_FUNCTION static T* get() {
    return GetContext<Arena>()->GetContext<T>();
  }
  GPR_ATTRIBUTE_ALWAYS_INLINE_FUNCTION static void set(T* value) {
    GetContext<Arena>()->SetContext(value);
  }
};

}  // namespace promise_detail

}  // namespace grpc_core

#endif  // GRPC_SRC_CORE_LIB_RESOURCE_QUOTA_ARENA_H