/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "arena_allocator-inl.h"

#include <sys/mman.h>

#include <algorithm>
#include <cstddef>
#include <iomanip>
#include <numeric>

#include "logging.h"
#include "mem_map.h"
#include "mutex.h"
#include "thread-current-inl.h"
#include "systrace.h"

namespace art {

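// Number of bytes reserved as a red zone after each allocation when running under a memory
// tool (e.g. ASan/Valgrind); see AllocWithMemoryTool() below.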
constexpr size_t kMemoryToolRedZoneBytes = 8;

template <bool kCount>
const char* const ArenaAllocatorStatsImpl<kCount>::kAllocNames[] = {
  // Every name should have the same width and end with a space. Abbreviate if necessary:
  "Misc         ",
  "SwitchTbl    ",
  "SlowPaths    ",
  "GrowBitMap   ",
  "STL          ",
  "GraphBuilder ",
  "Graph        ",
  "BasicBlock   ",
  "BlockList    ",
  "RevPostOrder ",
  "LinearOrder  ",
  "ConstantsMap ",
  "Predecessors ",
  "Successors   ",
  "Dominated    ",
  "Instruction  ",
  "CtorFenceIns ",
  "InvokeInputs ",
  "PhiInputs    ",
  "LoopInfo     ",
  "LIBackEdges  ",
  "TryCatchInf  ",
  "UseListNode  ",
  "Environment  ",
  "EnvVRegs     ",
  "EnvLocations ",
  "LocSummary   ",
  "SsaBuilder   ",
  "MoveOperands ",
  "CodeBuffer   ",
  "StackMaps    ",
  "Optimization ",
  "GVN          ",
  "InductionVar ",
  "BCE          ",
  "DCE          ",
  "LSE          ",
  "LICM         ",
  "LoopOpt      ",
  "SsaLiveness  ",
  "SsaPhiElim   ",
  "RefTypeProp  ",
  "SideEffects  ",
  "RegAllocator ",
  "RegAllocVldt ",
  "StackMapStm  ",
  "VectorNode   ",
  "CodeGen      ",
  "Assembler    ",
  "ParallelMove ",
  "GraphChecker ",
  "Verifier     ",
  "CallingConv  ",
  "CHA          ",
  "Scheduler    ",
  "Profile      ",
};

template <bool kCount>
ArenaAllocatorStatsImpl<kCount>::ArenaAllocatorStatsImpl()
    : num_allocations_(0u),
      alloc_stats_(kNumArenaAllocKinds, 0u) {
}

template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::Copy(const ArenaAllocatorStatsImpl& other) {
  num_allocations_ = other.num_allocations_;
  std::copy_n(other.alloc_stats_.begin(), kNumArenaAllocKinds, alloc_stats_.begin());
}

template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::RecordAlloc(size_t bytes, ArenaAllocKind kind) {
  alloc_stats_[kind] += bytes;
  ++num_allocations_;
}

template <bool kCount>
size_t ArenaAllocatorStatsImpl<kCount>::NumAllocations() const {
  return num_allocations_;
}

template <bool kCount>
size_t ArenaAllocatorStatsImpl<kCount>::BytesAllocated() const {
  const size_t init = 0u;  // Initial value of the correct type.
  return std::accumulate(alloc_stats_.begin(), alloc_stats_.end(), init);
}

template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::Dump(std::ostream& os, const Arena* first,
                                           ssize_t lost_bytes_adjustment) const {
  size_t malloc_bytes = 0u;
  size_t lost_bytes = 0u;
  size_t num_arenas = 0u;
  for (const Arena* arena = first; arena != nullptr; arena = arena->next_) {
    malloc_bytes += arena->Size();
    lost_bytes += arena->RemainingSpace();
    ++num_arenas;
  }
  // The lost_bytes_adjustment is used to make up for the fact that the current arena
  // may not have the bytes_allocated_ updated correctly.
  lost_bytes += lost_bytes_adjustment;
  const size_t bytes_allocated = BytesAllocated();
  os << " MEM: used: " << bytes_allocated << ", allocated: " << malloc_bytes
     << ", lost: " << lost_bytes << "\n";
  size_t num_allocations = NumAllocations();
  if (num_allocations != 0) {
    os << "Number of arenas allocated: " << num_arenas << ", Number of allocations: "
       << num_allocations << ", avg size: " << bytes_allocated / num_allocations << "\n";
  }
  os << "===== Allocation by kind\n";
  static_assert(arraysize(kAllocNames) == kNumArenaAllocKinds, "arraysize of kAllocNames");
  for (int i = 0; i < kNumArenaAllocKinds; i++) {
    os << kAllocNames[i] << std::setw(10) << alloc_stats_[i] << "\n";
  }
}

#pragma GCC diagnostic push
#if __clang_major__ >= 4
#pragma GCC diagnostic ignored "-Winstantiation-after-specialization"
#endif
// We're going to use ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations> which needs
// to be explicitly instantiated if kArenaAllocatorCountAllocations is true. Explicit
// instantiation of the specialization ArenaAllocatorStatsImpl<false> does not do anything
// but requires the warning "-Winstantiation-after-specialization" to be turned off.
//
// To avoid bit-rot of the ArenaAllocatorStatsImpl<true>, instantiate it also in debug builds
// (but keep the unnecessary code out of release builds) as we do not usually compile with
// kArenaAllocatorCountAllocations set to true.
template class ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations || kIsDebugBuild>;
#pragma GCC diagnostic pop

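// Out-of-line ArenaAllocatorMemoryTool helpers that forward to the memory tool annotation
// macros.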
void ArenaAllocatorMemoryTool::DoMakeDefined(void* ptr, size_t size) {
  MEMORY_TOOL_MAKE_DEFINED(ptr, size);
}

void ArenaAllocatorMemoryTool::DoMakeUndefined(void* ptr, size_t size) {
  MEMORY_TOOL_MAKE_UNDEFINED(ptr, size);
}

void ArenaAllocatorMemoryTool::DoMakeInaccessible(void* ptr, size_t size) {
  MEMORY_TOOL_MAKE_NOACCESS(ptr, size);
}

Arena::Arena() : bytes_allocated_(0), memory_(nullptr), size_(0), next_(nullptr) {
}

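// An arena backed by memory from calloc(). Over-allocates when necessary so that the usable
// region can be aligned to ArenaAllocator::kArenaAlignment.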
class MallocArena FINAL : public Arena {
 public:
  explicit MallocArena(size_t size = arena_allocator::kArenaDefaultSize);
  virtual ~MallocArena();
 private:
  static constexpr size_t RequiredOverallocation() {
    return (alignof(std::max_align_t) < ArenaAllocator::kArenaAlignment)
        ? ArenaAllocator::kArenaAlignment - alignof(std::max_align_t)
        : 0u;
  }

  uint8_t* unaligned_memory_;
};

MallocArena::MallocArena(size_t size) {
  // We need to guarantee kArenaAlignment aligned allocation for the new arena.
  // TODO: Use std::aligned_alloc() when it becomes available with C++17.
  constexpr size_t overallocation = RequiredOverallocation();
  unaligned_memory_ = reinterpret_cast<uint8_t*>(calloc(1, size + overallocation));
  CHECK(unaligned_memory_ != nullptr);  // Abort on OOM.
  DCHECK_ALIGNED(unaligned_memory_, alignof(std::max_align_t));
  if (overallocation == 0u) {
    memory_ = unaligned_memory_;
  } else {
    memory_ = AlignUp(unaligned_memory_, ArenaAllocator::kArenaAlignment);
    if (UNLIKELY(RUNNING_ON_MEMORY_TOOL > 0)) {
      size_t head = memory_ - unaligned_memory_;
      size_t tail = overallocation - head;
      MEMORY_TOOL_MAKE_NOACCESS(unaligned_memory_, head);
      MEMORY_TOOL_MAKE_NOACCESS(memory_ + size, tail);
    }
  }
  DCHECK_ALIGNED(memory_, ArenaAllocator::kArenaAlignment);
  size_ = size;
}

MallocArena::~MallocArena() {
  constexpr size_t overallocation = RequiredOverallocation();
  if (overallocation != 0u && UNLIKELY(RUNNING_ON_MEMORY_TOOL > 0)) {
    size_t head = memory_ - unaligned_memory_;
    size_t tail = overallocation - head;
    MEMORY_TOOL_MAKE_UNDEFINED(unaligned_memory_, head);
    MEMORY_TOOL_MAKE_UNDEFINED(memory_ + size_, tail);
  }
  free(reinterpret_cast<void*>(unaligned_memory_));
}

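// An arena backed by an anonymous MemMap. Unlike MallocArena, it can return unused pages to
// the OS through Release().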
class MemMapArena FINAL : public Arena {
 public:
  MemMapArena(size_t size, bool low_4gb, const char* name);
  virtual ~MemMapArena();
  void Release() OVERRIDE;

 private:
  std::unique_ptr<MemMap> map_;
};

MemMapArena::MemMapArena(size_t size, bool low_4gb, const char* name) {
  // Round up to a full page as that's the smallest unit of allocation for mmap()
  // and we want to be able to use all memory that we actually allocate.
  size = RoundUp(size, kPageSize);
  std::string error_msg;
  map_.reset(MemMap::MapAnonymous(
      name, nullptr, size, PROT_READ | PROT_WRITE, low_4gb, false, &error_msg));
  CHECK(map_.get() != nullptr) << error_msg;
  memory_ = map_->Begin();
  static_assert(ArenaAllocator::kArenaAlignment <= kPageSize,
                "Arena should not need stronger alignment than kPageSize.");
  DCHECK_ALIGNED(memory_, ArenaAllocator::kArenaAlignment);
  size_ = map_->Size();
}

MemMapArena::~MemMapArena() {
  // Destroys MemMap via std::unique_ptr<>.
}

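// Discard the arena's pages via madvise() so the memory is returned to the OS and reads back
// as zero, then reset bytes_allocated_. No-op if nothing was allocated from this arena.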
void MemMapArena::Release() {
  if (bytes_allocated_ > 0) {
    map_->MadviseDontNeedAndZero();
    bytes_allocated_ = 0;
  }
}

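// Zero the bytes that were handed out so that a reused arena again returns zero-initialized
// memory, then mark the arena as empty.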
void Arena::Reset() {
  if (bytes_allocated_ > 0) {
    memset(Begin(), 0, bytes_allocated_);
    bytes_allocated_ = 0;
  }
}

ArenaPool::ArenaPool(bool use_malloc, bool low_4gb, const char* name)
    : use_malloc_(use_malloc),
      lock_("Arena pool lock", kArenaPoolLock),
      free_arenas_(nullptr),
      low_4gb_(low_4gb),
      name_(name) {
  if (low_4gb) {
    CHECK(!use_malloc) << "low4gb must use map implementation";
  }
  if (!use_malloc) {
    MemMap::Init();
  }
}

ArenaPool::~ArenaPool() {
  ReclaimMemory();
}

void ArenaPool::ReclaimMemory() {
  while (free_arenas_ != nullptr) {
    auto* arena = free_arenas_;
    free_arenas_ = free_arenas_->next_;
    delete arena;
  }
}

void ArenaPool::LockReclaimMemory() {
  MutexLock lock(Thread::Current(), lock_);
  ReclaimMemory();
}

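// Hand out an arena of at least `size` bytes. The arena at the head of the free list is
// reused if it is big enough; otherwise a fresh MallocArena or MemMapArena is created. The
// returned arena is Reset(), so its memory reads back as zero.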
Arena* ArenaPool::AllocArena(size_t size) {
  Thread* self = Thread::Current();
  Arena* ret = nullptr;
  {
    MutexLock lock(self, lock_);
    if (free_arenas_ != nullptr && LIKELY(free_arenas_->Size() >= size)) {
      ret = free_arenas_;
      free_arenas_ = free_arenas_->next_;
    }
  }
  if (ret == nullptr) {
    ret = use_malloc_ ? static_cast<Arena*>(new MallocArena(size)) :
        new MemMapArena(size, low_4gb_, name_);
  }
  ret->Reset();
  return ret;
}

void ArenaPool::TrimMaps() {
  if (!use_malloc_) {
    ScopedTrace trace(__PRETTY_FUNCTION__);
    // Doesn't work for malloc.
    MutexLock lock(Thread::Current(), lock_);
    for (auto* arena = free_arenas_; arena != nullptr; arena = arena->next_) {
      arena->Release();
    }
  }
}

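// Sum of the bytes recorded as allocated in the arenas currently sitting on the free list;
// arenas that are checked out to an ArenaAllocator are not counted here.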
size_t ArenaPool::GetBytesAllocated() const {
  size_t total = 0;
  MutexLock lock(Thread::Current(), lock_);
  for (Arena* arena = free_arenas_; arena != nullptr; arena = arena->next_) {
    total += arena->GetBytesAllocated();
  }
  return total;
}

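// Take back a chain of arenas from an ArenaAllocator. With precise tracking enabled the
// arenas are deleted outright; otherwise the whole chain is prepended to the free list for
// reuse.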
void ArenaPool::FreeArenaChain(Arena* first) {
  if (UNLIKELY(RUNNING_ON_MEMORY_TOOL > 0)) {
    for (Arena* arena = first; arena != nullptr; arena = arena->next_) {
      MEMORY_TOOL_MAKE_UNDEFINED(arena->memory_, arena->bytes_allocated_);
    }
  }

  if (arena_allocator::kArenaAllocatorPreciseTracking) {
    // Do not reuse arenas when tracking.
    while (first != nullptr) {
      Arena* next = first->next_;
      delete first;
      first = next;
    }
    return;
  }

  if (first != nullptr) {
    Arena* last = first;
    while (last->next_ != nullptr) {
      last = last->next_;
    }
    Thread* self = Thread::Current();
    MutexLock lock(self, lock_);
    last->next_ = free_arenas_;
    free_arenas_ = first;
  }
}

size_t ArenaAllocator::BytesAllocated() const {
  return ArenaAllocatorStats::BytesAllocated();
}

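// Bytes handed out by this allocator: the in-use portion of the current (head) arena plus the
// recorded allocations of every older arena in the chain.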
size_t ArenaAllocator::BytesUsed() const {
  size_t total = ptr_ - begin_;
  if (arena_head_ != nullptr) {
    for (Arena* cur_arena = arena_head_->next_; cur_arena != nullptr;
         cur_arena = cur_arena->next_) {
      total += cur_arena->GetBytesAllocated();
    }
  }
  return total;
}

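// An ArenaAllocator hands out chunks of memory from a chain of arenas obtained from `pool`
// and returns the whole chain to the pool on destruction. A minimal usage sketch (assuming
// the Alloc() interface declared in arena_allocator.h):
//
//   ArenaPool pool(/* use_malloc= */ true, /* low_4gb= */ false, "test pool");
//   {
//     ArenaAllocator allocator(&pool);
//     void* storage = allocator.Alloc(256, kArenaAllocMisc);  // Zero-initialized.
//     // ... use `storage`; there is no per-allocation free, it lives as long as `allocator`.
//   }  // ~ArenaAllocator() gives the arenas back to `pool` for reuse.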
ArenaAllocator::ArenaAllocator(ArenaPool* pool)
  : pool_(pool),
    begin_(nullptr),
    end_(nullptr),
    ptr_(nullptr),
    arena_head_(nullptr) {
}

void ArenaAllocator::UpdateBytesAllocated() {
  if (arena_head_ != nullptr) {
    // Update how many bytes we have allocated into the arena so that the arena pool knows how
    // much memory to zero out.
    arena_head_->bytes_allocated_ = ptr_ - begin_;
  }
}

void* ArenaAllocator::AllocWithMemoryTool(size_t bytes, ArenaAllocKind kind) {
  // We mark all memory for a newly retrieved arena as inaccessible and then
  // mark only the actually allocated memory as defined. That leaves red zones
  // and padding between allocations marked as inaccessible.
  size_t rounded_bytes = RoundUp(bytes + kMemoryToolRedZoneBytes, 8);
  ArenaAllocatorStats::RecordAlloc(rounded_bytes, kind);
  uint8_t* ret;
  if (UNLIKELY(rounded_bytes > static_cast<size_t>(end_ - ptr_))) {
    ret = AllocFromNewArenaWithMemoryTool(rounded_bytes);
  } else {
    ret = ptr_;
    ptr_ += rounded_bytes;
  }
  MEMORY_TOOL_MAKE_DEFINED(ret, bytes);
  // Check that the memory is already zeroed out.
  DCHECK(std::all_of(ret, ret + bytes, [](uint8_t val) { return val == 0u; }));
  return ret;
}

void* ArenaAllocator::AllocWithMemoryToolAlign16(size_t bytes, ArenaAllocKind kind) {
  // We mark all memory for a newly retrieved arena as inaccessible and then
  // mark only the actually allocated memory as defined. That leaves red zones
  // and padding between allocations marked as inaccessible.
  size_t rounded_bytes = bytes + kMemoryToolRedZoneBytes;
  DCHECK_ALIGNED(rounded_bytes, 8);  // `bytes` is 16-byte aligned, red zone is 8-byte aligned.
  // Padding needed to bring ptr_ up to the next 16-byte boundary.
  uintptr_t padding =
      RoundUp(reinterpret_cast<uintptr_t>(ptr_), 16) - reinterpret_cast<uintptr_t>(ptr_);
  ArenaAllocatorStats::RecordAlloc(rounded_bytes, kind);
  uint8_t* ret;
  if (UNLIKELY(padding + rounded_bytes > static_cast<size_t>(end_ - ptr_))) {
    static_assert(kArenaAlignment >= 16, "Expecting sufficient alignment for new Arena.");
    ret = AllocFromNewArenaWithMemoryTool(rounded_bytes);
  } else {
    ptr_ += padding;  // Leave padding inaccessible.
    ret = ptr_;
    ptr_ += rounded_bytes;
  }
  MEMORY_TOOL_MAKE_DEFINED(ret, bytes);
  // Check that the memory is already zeroed out.
  DCHECK(std::all_of(ret, ret + bytes, [](uint8_t val) { return val == 0u; }));
  return ret;
}

ArenaAllocator::~ArenaAllocator() {
  // Reclaim all the arenas by giving them back to the arena pool.
  UpdateBytesAllocated();
  pool_->FreeArenaChain(arena_head_);
}

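// Slow path: fetch a new arena from the pool, sized for at least `bytes`. If the old arena
// still has more free space than the new one would have after this allocation, the new arena
// is linked in behind the head and used only for this single allocation; otherwise it becomes
// the new head arena that subsequent allocations bump-allocate from.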
uint8_t* ArenaAllocator::AllocFromNewArena(size_t bytes) {
  Arena* new_arena = pool_->AllocArena(std::max(arena_allocator::kArenaDefaultSize, bytes));
  DCHECK(new_arena != nullptr);
  DCHECK_LE(bytes, new_arena->Size());
  if (static_cast<size_t>(end_ - ptr_) > new_arena->Size() - bytes) {
    // The old arena has more space remaining than the new one, so keep using it.
    // This can happen when the requested size is over half of the default size.
    DCHECK(arena_head_ != nullptr);
    new_arena->bytes_allocated_ = bytes;  // UpdateBytesAllocated() on the new_arena.
    new_arena->next_ = arena_head_->next_;
    arena_head_->next_ = new_arena;
  } else {
    UpdateBytesAllocated();
    new_arena->next_ = arena_head_;
    arena_head_ = new_arena;
    // Update our internal data structures.
    begin_ = new_arena->Begin();
    DCHECK_ALIGNED(begin_, kAlignment);
    ptr_ = begin_ + bytes;
    end_ = new_arena->End();
  }
  return new_arena->Begin();
}

uint8_t* ArenaAllocator::AllocFromNewArenaWithMemoryTool(size_t bytes) {
  uint8_t* ret = AllocFromNewArena(bytes);
  uint8_t* noaccess_begin = ret + bytes;
  uint8_t* noaccess_end;
  if (ret == arena_head_->Begin()) {
    DCHECK(ptr_ - bytes == ret);
    noaccess_end = end_;
  } else {
    // We're still using the old arena but `ret` comes from a new one just after it.
    DCHECK(arena_head_->next_ != nullptr);
    DCHECK(ret == arena_head_->next_->Begin());
    DCHECK_EQ(bytes, arena_head_->next_->GetBytesAllocated());
    noaccess_end = arena_head_->next_->End();
  }
  MEMORY_TOOL_MAKE_NOACCESS(noaccess_begin, noaccess_end - noaccess_begin);
  return ret;
}

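// Returns true if `ptr` lies in the currently active allocation range or in any arena owned
// by this allocator; note that this walks the entire arena chain.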
bool ArenaAllocator::Contains(const void* ptr) const {
  if (ptr >= begin_ && ptr < end_) {
    return true;
  }
  for (const Arena* cur_arena = arena_head_; cur_arena != nullptr; cur_arena = cur_arena->next_) {
    if (cur_arena->Contains(ptr)) {
      return true;
    }
  }
  return false;
}

MemStats::MemStats(const char* name,
                   const ArenaAllocatorStats* stats,
                   const Arena* first_arena,
                   ssize_t lost_bytes_adjustment)
    : name_(name),
      stats_(stats),
      first_arena_(first_arena),
      lost_bytes_adjustment_(lost_bytes_adjustment) {
}

void MemStats::Dump(std::ostream& os) const {
  os << name_ << " stats:\n";
  stats_->Dump(os, first_arena_, lost_bytes_adjustment_);
}

// Dump memory usage stats.
MemStats ArenaAllocator::GetMemStats() const {
  ssize_t lost_bytes_adjustment =
      (arena_head_ == nullptr) ? 0 : (end_ - ptr_) - arena_head_->RemainingSpace();
  return MemStats("ArenaAllocator", this, arena_head_, lost_bytes_adjustment);
}

}  // namespace art