//
//
// Copyright 2017 gRPC authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
//

#include "src/core/lib/resource_quota/arena.h"

#include <grpc/support/alloc.h>
#include <grpc/support/port_platform.h>

#include <algorithm>
#include <atomic>
#include <new>
#include <utility>

#include "absl/log/check.h"
#include "absl/log/log.h"
#include "src/core/lib/resource_quota/resource_quota.h"
#include "src/core/util/alloc.h"

namespace grpc_core {

namespace {

ArenaStorage(size_t & initial_size)34 void* ArenaStorage(size_t& initial_size) {
35   size_t base_size = Arena::ArenaOverhead() +
36                      GPR_ROUND_UP_TO_ALIGNMENT_SIZE(
37                          arena_detail::BaseArenaContextTraits::ContextSize());
38   initial_size =
39       std::max(GPR_ROUND_UP_TO_ALIGNMENT_SIZE(initial_size), base_size);
40   static constexpr size_t alignment =
41       (GPR_CACHELINE_SIZE > GPR_MAX_ALIGNMENT &&
42        GPR_CACHELINE_SIZE % GPR_MAX_ALIGNMENT == 0)
43           ? GPR_CACHELINE_SIZE
44           : GPR_MAX_ALIGNMENT;
45   return gpr_malloc_aligned(initial_size, alignment);
46 }

}  // namespace

~Arena()50 Arena::~Arena() {
51   for (size_t i = 0; i < arena_detail::BaseArenaContextTraits::NumContexts();
52        ++i) {
53     arena_detail::BaseArenaContextTraits::Destroy(i, contexts()[i]);
54   }
55   DestroyManagedNewObjects();
56   arena_factory_->FinalizeArena(this);
57   arena_factory_->allocator().Release(
58       total_allocated_.load(std::memory_order_relaxed));
59   Zone* z = last_zone_;
60   while (z) {
61     Zone* prev_z = z->prev;
62     Destruct(z);
63     gpr_free_aligned(z);
64     z = prev_z;
65   }
66 }
Create(size_t initial_size,RefCountedPtr<ArenaFactory> arena_factory)68 RefCountedPtr<Arena> Arena::Create(size_t initial_size,
69                                    RefCountedPtr<ArenaFactory> arena_factory) {
70   void* p = ArenaStorage(initial_size);
71   return RefCountedPtr<Arena>(
72       new (p) Arena(initial_size, std::move(arena_factory)));
73 }
Arena(size_t initial_size,RefCountedPtr<ArenaFactory> arena_factory)75 Arena::Arena(size_t initial_size, RefCountedPtr<ArenaFactory> arena_factory)
76     : initial_zone_size_(initial_size),
77       total_used_(ArenaOverhead() +
78                   GPR_ROUND_UP_TO_ALIGNMENT_SIZE(
79                       arena_detail::BaseArenaContextTraits::ContextSize())),
80       arena_factory_(std::move(arena_factory)) {
81   for (size_t i = 0; i < arena_detail::BaseArenaContextTraits::NumContexts();
82        ++i) {
83     contexts()[i] = nullptr;
84   }
85   CHECK_GE(initial_size, arena_detail::BaseArenaContextTraits::ContextSize());
86   arena_factory_->allocator().Reserve(initial_size);
87 }
DestroyManagedNewObjects()89 void Arena::DestroyManagedNewObjects() {
90   ManagedNewObject* p;
91   // Outer loop: clear the managed new object list.
92   // We do this repeatedly in case a destructor ends up allocating something.
93   while ((p = managed_new_head_.exchange(nullptr, std::memory_order_relaxed)) !=
94          nullptr) {
95     // Inner loop: destruct a batch of objects.
96     while (p != nullptr) {
97       Destruct(std::exchange(p, p->next));
98     }
99   }
100 }
Destroy() const102 void Arena::Destroy() const {
103   this->~Arena();
104   gpr_free_aligned(const_cast<Arena*>(this));
105 }
AllocZone(size_t size)107 void* Arena::AllocZone(size_t size) {
108   // If the allocation isn't able to end in the initial zone, create a new
109   // zone for this allocation, and any unused space in the initial zone is
110   // wasted. This overflowing and wasting is uncommon because of our arena
111   // sizing hysteresis (that is, most calls should have a large enough initial
112   // zone and will not need to grow the arena).
113   static constexpr size_t zone_base_size =
114       GPR_ROUND_UP_TO_ALIGNMENT_SIZE(sizeof(Zone));
115   size_t alloc_size = zone_base_size + size;
116   arena_factory_->allocator().Reserve(alloc_size);
117   total_allocated_.fetch_add(alloc_size, std::memory_order_relaxed);
118   Zone* z = new (gpr_malloc_aligned(alloc_size, GPR_MAX_ALIGNMENT)) Zone();
119   auto* prev = last_zone_.load(std::memory_order_relaxed);
120   do {
121     z->prev = prev;
122   } while (!last_zone_.compare_exchange_weak(prev, z, std::memory_order_relaxed,
123                                              std::memory_order_relaxed));
124   return reinterpret_cast<char*>(z) + zone_base_size;
125 }
Link(std::atomic<ManagedNewObject * > * head)127 void Arena::ManagedNewObject::Link(std::atomic<ManagedNewObject*>* head) {
128   next = head->load(std::memory_order_relaxed);
129   while (!head->compare_exchange_weak(next, this, std::memory_order_acq_rel,
130                                       std::memory_order_relaxed)) {
131   }
132 }
DefaultMemoryAllocatorForSimpleArenaAllocator()134 MemoryAllocator DefaultMemoryAllocatorForSimpleArenaAllocator() {
135   return ResourceQuota::Default()->memory_quota()->CreateMemoryAllocator(
136       "simple-arena-allocator");
137 }
SimpleArenaAllocator(size_t initial_size,MemoryAllocator allocator)139 RefCountedPtr<ArenaFactory> SimpleArenaAllocator(size_t initial_size,
140                                                  MemoryAllocator allocator) {
141   class Allocator : public ArenaFactory {
142    public:
143     Allocator(size_t initial_size, MemoryAllocator allocator)
144         : ArenaFactory(std::move(allocator)), initial_size_(initial_size) {}
145 
146     RefCountedPtr<Arena> MakeArena() override {
147       return Arena::Create(initial_size_, Ref());
148     }
149 
150     void FinalizeArena(Arena*) override {
151       // No-op.
152     }
153 
154    private:
155     size_t initial_size_;
156   };
157   return MakeRefCounted<Allocator>(initial_size, std::move(allocator));
158 }

}  // namespace grpc_core