/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "arena_allocator-inl.h"

#include <algorithm>
#include <cstddef>
#include <iomanip>
#include <numeric>

#include <android-base/logging.h>

#include "mman.h"

namespace art {

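// Number of red-zone bytes requested after each allocation when running with a
// memory tool; see AllocWithMemoryTool() below.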
constexpr size_t kMemoryToolRedZoneBytes = 8;

template <bool kCount>
const char* const ArenaAllocatorStatsImpl<kCount>::kAllocNames[] = {
  // Every name should have the same width and end with a space. Abbreviate if necessary:
  "Misc         ",
  "SwitchTbl    ",
  "SlowPaths    ",
  "GrowBitMap   ",
  "STL          ",
  "GraphBuilder ",
  "Graph        ",
  "BasicBlock   ",
  "BlockList    ",
  "RevPostOrder ",
  "LinearOrder  ",
  "ConstantsMap ",
  "Predecessors ",
  "Successors   ",
  "Dominated    ",
  "Instruction  ",
  "CtorFenceIns ",
  "InvokeInputs ",
  "PhiInputs    ",
  "TypeCheckIns ",
  "LoopInfo     ",
  "LIBackEdges  ",
  "TryCatchInf  ",
  "UseListNode  ",
  "Environment  ",
  "EnvVRegs     ",
  "EnvLocations ",
  "LocSummary   ",
  "SsaBuilder   ",
  "MoveOperands ",
  "CodeBuffer   ",
  "StackMaps    ",
  "Optimization ",
  "GVN          ",
  "InductionVar ",
  "BCE          ",
  "DCE          ",
  "LSA          ",
  "LSE          ",
  "CFRE         ",
  "LICM         ",
  "LoopOpt      ",
  "SsaLiveness  ",
  "SsaPhiElim   ",
  "RefTypeProp  ",
  "SelectGen    ",
  "SideEffects  ",
  "RegAllocator ",
  "RegAllocVldt ",
  "StackMapStm  ",
  "BitTableBld  ",
  "VectorNode   ",
  "CodeGen      ",
  "Assembler    ",
  "ParallelMove ",
  "GraphChecker ",
  "Verifier     ",
  "CallingConv  ",
  "CHA          ",
  "Scheduler    ",
  "Profile      ",
  "SBCloner     ",
};
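
// The uniform width plus trailing space makes each name act as its own column
// separator when Dump() prints it directly before a std::setw(10) value.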

template <bool kCount>
ArenaAllocatorStatsImpl<kCount>::ArenaAllocatorStatsImpl()
    : num_allocations_(0u),
      alloc_stats_(kNumArenaAllocKinds, 0u) {
}

template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::Copy(const ArenaAllocatorStatsImpl& other) {
  num_allocations_ = other.num_allocations_;
  std::copy_n(other.alloc_stats_.begin(), kNumArenaAllocKinds, alloc_stats_.begin());
}

template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::RecordAlloc(size_t bytes, ArenaAllocKind kind) {
  alloc_stats_[kind] += bytes;
  ++num_allocations_;
}

template <bool kCount>
size_t ArenaAllocatorStatsImpl<kCount>::NumAllocations() const {
  return num_allocations_;
}

template <bool kCount>
size_t ArenaAllocatorStatsImpl<kCount>::BytesAllocated() const {
  const size_t init = 0u;  // Initial value of the correct type.
  return std::accumulate(alloc_stats_.begin(), alloc_stats_.end(), init);
}

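// Dump() produces output of the form (values illustrative):
//   MEM: used: 12345, allocated: 16384, lost: 4039
//   Number of arenas allocated: 2, Number of allocations: 100, avg size: 123
//   ===== Allocation by kind
//   Misc              12345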
template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::Dump(std::ostream& os, const Arena* first,
                                           ssize_t lost_bytes_adjustment) const {
  size_t malloc_bytes = 0u;
  size_t lost_bytes = 0u;
  size_t num_arenas = 0u;
  for (const Arena* arena = first; arena != nullptr; arena = arena->next_) {
    malloc_bytes += arena->Size();
    lost_bytes += arena->RemainingSpace();
    ++num_arenas;
  }
  // The lost_bytes_adjustment is used to make up for the fact that the current arena
  // may not have the bytes_allocated_ updated correctly.
  lost_bytes += lost_bytes_adjustment;
  const size_t bytes_allocated = BytesAllocated();
  os << " MEM: used: " << bytes_allocated << ", allocated: " << malloc_bytes
     << ", lost: " << lost_bytes << "\n";
  size_t num_allocations = NumAllocations();
  if (num_allocations != 0) {
    os << "Number of arenas allocated: " << num_arenas << ", Number of allocations: "
       << num_allocations << ", avg size: " << bytes_allocated / num_allocations << "\n";
  }
  os << "===== Allocation by kind\n";
  static_assert(arraysize(kAllocNames) == kNumArenaAllocKinds, "arraysize of kAllocNames");
  for (int i = 0; i < kNumArenaAllocKinds; i++) {
    // Reduce output by listing only allocation kinds that actually have allocations.
    if (alloc_stats_[i] != 0u) {
      os << kAllocNames[i] << std::setw(10) << alloc_stats_[i] << "\n";
    }
  }
}

#pragma GCC diagnostic push
#if __clang_major__ >= 4
#pragma GCC diagnostic ignored "-Winstantiation-after-specialization"
#endif
// We're going to use ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations> which needs
// to be explicitly instantiated if kArenaAllocatorCountAllocations is true. Explicit
// instantiation of the specialization ArenaAllocatorStatsImpl<false> does not do anything
// but requires the warning "-Winstantiation-after-specialization" to be turned off.
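//
// The warned-about pattern in miniature (a sketch, not the actual declarations):
//
//   template <bool B> class Impl { /* counting implementation */ };
//   template <> class Impl<false> { /* no-op specialization, defined in a header */ };
//   template class Impl<false>;  // Instantiation after the specialization -> the warning.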
//
// To avoid bit-rot of the ArenaAllocatorStatsImpl<true>, instantiate it also in debug builds
// (but keep the unnecessary code out of release builds) as we do not usually compile with
// kArenaAllocatorCountAllocations set to true.
template class ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations || kIsDebugBuild>;
#pragma GCC diagnostic pop

void ArenaAllocatorMemoryTool::DoMakeDefined(void* ptr, size_t size) {
  MEMORY_TOOL_MAKE_DEFINED(ptr, size);
}

void ArenaAllocatorMemoryTool::DoMakeUndefined(void* ptr, size_t size) {
  MEMORY_TOOL_MAKE_UNDEFINED(ptr, size);
}

void ArenaAllocatorMemoryTool::DoMakeInaccessible(void* ptr, size_t size) {
  MEMORY_TOOL_MAKE_NOACCESS(ptr, size);
}

Arena::Arena() : bytes_allocated_(0), memory_(nullptr), size_(0), next_(nullptr) {
}

size_t ArenaAllocator::BytesAllocated() const {
  return ArenaAllocatorStats::BytesAllocated();
}

size_t ArenaAllocator::BytesUsed() const {
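  // Count the head arena from the live bump pointer, since its bytes_allocated_
  // is only updated lazily (see UpdateBytesAllocated()).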
  size_t total = ptr_ - begin_;
  if (arena_head_ != nullptr) {
    for (Arena* cur_arena = arena_head_->next_; cur_arena != nullptr;
         cur_arena = cur_arena->next_) {
      total += cur_arena->GetBytesAllocated();
    }
  }
  return total;
}

ArenaAllocator::ArenaAllocator(ArenaPool* pool)
  : pool_(pool),
    begin_(nullptr),
    end_(nullptr),
    ptr_(nullptr),
    arena_head_(nullptr) {
}

void ArenaAllocator::UpdateBytesAllocated() {
  if (arena_head_ != nullptr) {
    // Update how many bytes we have allocated into the arena so that the arena pool knows how
    // much memory to zero out.
    arena_head_->bytes_allocated_ = ptr_ - begin_;
  }
}

void* ArenaAllocator::AllocWithMemoryTool(size_t bytes, ArenaAllocKind kind) {
  // We mark all memory for a newly retrieved arena as inaccessible and then
  // mark only the actually allocated memory as defined. That leaves red zones
  // and padding between allocations marked as inaccessible.
  size_t rounded_bytes = RoundUp(bytes + kMemoryToolRedZoneBytes, 8);
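  // E.g., bytes = 5: rounded_bytes = RoundUp(5 + 8, 8) = 16, leaving 11 trailing
  // bytes (red zone plus padding) inaccessible.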
  ArenaAllocatorStats::RecordAlloc(rounded_bytes, kind);
  uint8_t* ret;
  if (UNLIKELY(rounded_bytes > static_cast<size_t>(end_ - ptr_))) {
    ret = AllocFromNewArenaWithMemoryTool(rounded_bytes);
  } else {
    ret = ptr_;
    ptr_ += rounded_bytes;
  }
  MEMORY_TOOL_MAKE_DEFINED(ret, bytes);
  // Check that the memory is already zeroed out.
  DCHECK(std::all_of(ret, ret + bytes, [](uint8_t val) { return val == 0u; }));
  return ret;
}

void* ArenaAllocator::AllocWithMemoryToolAlign16(size_t bytes, ArenaAllocKind kind) {
  // We mark all memory for a newly retrieved arena as inaccessible and then
  // mark only the actually allocated memory as defined. That leaves red zones
  // and padding between allocations marked as inaccessible.
  size_t rounded_bytes = bytes + kMemoryToolRedZoneBytes;
  DCHECK_ALIGNED(rounded_bytes, 8);  // `bytes` is 16-byte aligned, red zone is 8-byte aligned.
  uintptr_t padding =
      ((reinterpret_cast<uintptr_t>(ptr_) + 15u) & ~15u) - reinterpret_cast<uintptr_t>(ptr_);
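  // E.g., for ptr_ ending in 0x...8: ((0x8 + 15u) & ~15u) - 0x8 = 0x10 - 0x8, i.e.
  // 8 bytes of padding to reach the next 16-byte boundary.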
  ArenaAllocatorStats::RecordAlloc(rounded_bytes, kind);
  uint8_t* ret;
  if (UNLIKELY(padding + rounded_bytes > static_cast<size_t>(end_ - ptr_))) {
    static_assert(kArenaAlignment >= 16, "Expecting sufficient alignment for new Arena.");
    ret = AllocFromNewArenaWithMemoryTool(rounded_bytes);
  } else {
    ptr_ += padding;  // Leave padding inaccessible.
    ret = ptr_;
    ptr_ += rounded_bytes;
  }
  MEMORY_TOOL_MAKE_DEFINED(ret, bytes);
  // Check that the memory is already zeroed out.
  DCHECK(std::all_of(ret, ret + bytes, [](uint8_t val) { return val == 0u; }));
  return ret;
}

ArenaAllocator::~ArenaAllocator() {
  // Reclaim all the arenas by giving them back to the arena pool.
  UpdateBytesAllocated();
  pool_->FreeArenaChain(arena_head_);
}

uint8_t* ArenaAllocator::AllocFromNewArena(size_t bytes) {
  Arena* new_arena = pool_->AllocArena(std::max(arena_allocator::kArenaDefaultSize, bytes));
  DCHECK(new_arena != nullptr);
  DCHECK_LE(bytes, new_arena->Size());
  if (static_cast<size_t>(end_ - ptr_) > new_arena->Size() - bytes) {
    // The old arena has more space remaining than the new one, so keep using it.
    // This can happen when the requested size is over half of the default size.
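    // E.g., with a hypothetical 128 KiB default arena size: if the current arena
    // still has 100 KiB free and 120 KiB are requested, the request gets its own
    // arena while the 100 KiB remain available for subsequent allocations.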
    DCHECK(arena_head_ != nullptr);
    new_arena->bytes_allocated_ = bytes;  // UpdateBytesAllocated() on the new_arena.
    new_arena->next_ = arena_head_->next_;
    arena_head_->next_ = new_arena;
  } else {
    UpdateBytesAllocated();
    new_arena->next_ = arena_head_;
    arena_head_ = new_arena;
    // Update our internal data structures.
    begin_ = new_arena->Begin();
    DCHECK_ALIGNED(begin_, kAlignment);
    ptr_ = begin_ + bytes;
    end_ = new_arena->End();
  }
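  // In either branch the allocation occupies the very start of the new arena.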
  return new_arena->Begin();
}

uint8_t* ArenaAllocator::AllocFromNewArenaWithMemoryTool(size_t bytes) {
  uint8_t* ret = AllocFromNewArena(bytes);
  uint8_t* noaccess_begin = ret + bytes;
  uint8_t* noaccess_end;
  if (ret == arena_head_->Begin()) {
    DCHECK(ptr_ - bytes == ret);
    noaccess_end = end_;
  } else {
    // We're still using the old arena but `ret` comes from a new one just after it.
    DCHECK(arena_head_->next_ != nullptr);
    DCHECK(ret == arena_head_->next_->Begin());
    DCHECK_EQ(bytes, arena_head_->next_->GetBytesAllocated());
    noaccess_end = arena_head_->next_->End();
  }
  MEMORY_TOOL_MAKE_NOACCESS(noaccess_begin, noaccess_end - noaccess_begin);
  return ret;
}

bool ArenaAllocator::Contains(const void* ptr) const {
  if (ptr >= begin_ && ptr < end_) {
    return true;
  }
  for (const Arena* cur_arena = arena_head_; cur_arena != nullptr; cur_arena = cur_arena->next_) {
    if (cur_arena->Contains(ptr)) {
      return true;
    }
  }
  return false;
}

MemStats::MemStats(const char* name,
                   const ArenaAllocatorStats* stats,
                   const Arena* first_arena,
                   ssize_t lost_bytes_adjustment)
    : name_(name),
      stats_(stats),
      first_arena_(first_arena),
      lost_bytes_adjustment_(lost_bytes_adjustment) {
}

void MemStats::Dump(std::ostream& os) const {
  os << name_ << " stats:\n";
  stats_->Dump(os, first_arena_, lost_bytes_adjustment_);
}

// Dump memory usage stats.
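// The adjustment compensates for the head arena's lazily updated bytes_allocated_:
// (end_ - ptr_) is that arena's true remaining space, while RemainingSpace() is
// computed from the possibly stale counter.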
MemStats ArenaAllocator::GetMemStats() const {
  ssize_t lost_bytes_adjustment =
      (arena_head_ == nullptr) ? 0 : (end_ - ptr_) - arena_head_->RemainingSpace();
  return MemStats("ArenaAllocator", this, arena_head_, lost_bytes_adjustment);
}
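
// Typical usage, as a sketch (assumes a concrete pool type such as MallocArenaPool;
// names declared outside this file are not guaranteed by it):
//
//   MallocArenaPool pool;
//   ArenaAllocator allocator(&pool);
//   void* data = allocator.Alloc(64, kArenaAllocMisc);  // Arena memory is zeroed.
//   std::ostringstream oss;
//   allocator.GetMemStats().Dump(oss);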

}  // namespace art