/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "arena_allocator-inl.h"

#include <algorithm>
#include <cstddef>
#include <iomanip>
#include <numeric>

#include <android-base/logging.h>

#include "mman.h"

namespace art {

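// ArenaAllocatorStatsImpl<kCount> collects per-kind allocation statistics when
// kCount is true; the <false> specialization compiles the tracking away (see the
// explicit instantiation note further below).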
template <bool kCount>
const char* const ArenaAllocatorStatsImpl<kCount>::kAllocNames[] = {
  // Every name should have the same width and end with a space. Abbreviate if necessary:
  "Misc         ",
  "SwitchTbl    ",
  "SlowPaths    ",
  "GrowBitMap   ",
  "STL          ",
  "GraphBuilder ",
  "Graph        ",
  "BasicBlock   ",
  "BlockList    ",
  "RevPostOrder ",
  "LinearOrder  ",
  "Reachability ",
  "ConstantsMap ",
  "Predecessors ",
  "Successors   ",
  "Dominated    ",
  "Instruction  ",
  "CtorFenceIns ",
  "InvokeInputs ",
  "PhiInputs    ",
  "TypeCheckIns ",
  "LoopInfo     ",
  "LIBackEdges  ",
  "TryCatchInf  ",
  "UseListNode  ",
  "Environment  ",
  "EnvVRegs     ",
  "EnvLocations ",
  "LocSummary   ",
  "SsaBuilder   ",
  "MoveOperands ",
  "CodeBuffer   ",
  "StackMaps    ",
  "Optimization ",
  "GVN          ",
  "InductionVar ",
  "BCE          ",
  "DCE          ",
  "LSA          ",
  "LSE          ",
  "CFRE         ",
  "LICM         ",
  "WBE          ",
  "LoopOpt      ",
  "SsaLiveness  ",
  "SsaPhiElim   ",
  "RefTypeProp  ",
  "SelectGen    ",
  "SideEffects  ",
  "RegAllocator ",
  "RegAllocVldt ",
  "StackMapStm  ",
  "BitTableBld  ",
  "VectorNode   ",
  "CodeGen      ",
  "Assembler    ",
  "ParallelMove ",
  "GraphChecker ",
  "Verifier     ",
  "CallingConv  ",
  "CHA          ",
  "Scheduler    ",
  "Profile      ",
  "SBCloner     ",
  "Transaction  ",
};

template <bool kCount>
ArenaAllocatorStatsImpl<kCount>::ArenaAllocatorStatsImpl()
    : num_allocations_(0u),
      alloc_stats_(kNumArenaAllocKinds, 0u) {
}

template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::Copy(const ArenaAllocatorStatsImpl& other) {
  num_allocations_ = other.num_allocations_;
  std::copy_n(other.alloc_stats_.begin(), kNumArenaAllocKinds, alloc_stats_.begin());
}

template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::RecordAlloc(size_t bytes, ArenaAllocKind kind) {
  alloc_stats_[kind] += bytes;
  ++num_allocations_;
}

template <bool kCount>
size_t ArenaAllocatorStatsImpl<kCount>::NumAllocations() const {
  return num_allocations_;
}

template <bool kCount>
size_t ArenaAllocatorStatsImpl<kCount>::BytesAllocated() const {
  const size_t init = 0u;  // Initial value of the correct type.
  return std::accumulate(alloc_stats_.begin(), alloc_stats_.end(), init);
}

template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::Dump(std::ostream& os, const Arena* first,
                                           ssize_t lost_bytes_adjustment) const {
  size_t malloc_bytes = 0u;
  size_t lost_bytes = 0u;
  size_t num_arenas = 0u;
  for (const Arena* arena = first; arena != nullptr; arena = arena->next_) {
    malloc_bytes += arena->Size();
    lost_bytes += arena->RemainingSpace();
    ++num_arenas;
  }
  // The lost_bytes_adjustment is used to make up for the fact that the current arena
  // may not have the bytes_allocated_ updated correctly.
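  // (The caller computes this adjustment; see ArenaAllocator::GetMemStats() below,
  // which derives it from the head arena's actual remaining space.)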
  lost_bytes += lost_bytes_adjustment;
  const size_t bytes_allocated = BytesAllocated();
  os << " MEM: used: " << bytes_allocated << ", allocated: " << malloc_bytes
     << ", lost: " << lost_bytes << "\n";
  size_t num_allocations = NumAllocations();
  if (num_allocations != 0) {
    os << "Number of arenas allocated: " << num_arenas << ", Number of allocations: "
       << num_allocations << ", avg size: " << bytes_allocated / num_allocations << "\n";
  }
  os << "===== Allocation by kind\n";
  static_assert(arraysize(kAllocNames) == kNumArenaAllocKinds, "arraysize of kAllocNames");
  for (int i = 0; i < kNumArenaAllocKinds; i++) {
    // Reduce output by listing only allocation kinds that actually have allocations.
    if (alloc_stats_[i] != 0u) {
      os << kAllocNames[i] << std::setw(10) << alloc_stats_[i] << "\n";
    }
  }
}

#pragma GCC diagnostic push
#if __clang_major__ >= 4
#pragma GCC diagnostic ignored "-Winstantiation-after-specialization"
#endif
// We're going to use ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations>, which needs
// to be explicitly instantiated if kArenaAllocatorCountAllocations is true. Explicit
// instantiation of the specialization ArenaAllocatorStatsImpl<false> does not do anything
// but requires the warning "-Winstantiation-after-specialization" to be turned off.
//
// To avoid bit-rot of ArenaAllocatorStatsImpl<true>, instantiate it also in debug builds
// (but keep the unnecessary code out of release builds), as we do not usually compile with
// kArenaAllocatorCountAllocations set to true.
template class ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations || kIsDebugBuild>;
#pragma GCC diagnostic pop

void ArenaAllocatorMemoryTool::DoMakeDefined(void* ptr, size_t size) {
  MEMORY_TOOL_MAKE_DEFINED(ptr, size);
}

void ArenaAllocatorMemoryTool::DoMakeUndefined(void* ptr, size_t size) {
  MEMORY_TOOL_MAKE_UNDEFINED(ptr, size);
}

void ArenaAllocatorMemoryTool::DoMakeInaccessible(void* ptr, size_t size) {
  MEMORY_TOOL_MAKE_NOACCESS(ptr, size);
}

size_t ArenaAllocator::BytesAllocated() const {
  return ArenaAllocatorStats::BytesAllocated();
}

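// The head arena's bytes_allocated_ is only updated lazily (see
// UpdateBytesAllocated()), so the usage of the current arena is computed
// directly from the bump pointer and the chain walk starts after the head.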
size_t ArenaAllocator::BytesUsed() const {
  size_t total = ptr_ - begin_;
  if (arena_head_ != nullptr) {
    for (Arena* cur_arena = arena_head_->next_; cur_arena != nullptr;
         cur_arena = cur_arena->next_) {
      total += cur_arena->GetBytesAllocated();
    }
  }
  return total;
}

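// Minimal usage sketch (illustrative only, assuming a concrete ArenaPool
// implementation such as MallocArenaPool; Alloc() and kArenaAllocMisc come
// from the accompanying headers):
//
//   MallocArenaPool pool;
//   ArenaAllocator allocator(&pool);
//   void* mem = allocator.Alloc(64, kArenaAllocMisc);
//   // ~ArenaAllocator() returns the whole arena chain to the pool.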
ArenaAllocator::ArenaAllocator(ArenaPool* pool)
  : pool_(pool),
    begin_(nullptr),
    end_(nullptr),
    ptr_(nullptr),
    arena_head_(nullptr) {
}

void ArenaAllocator::UpdateBytesAllocated() {
  if (arena_head_ != nullptr) {
    // Update how many bytes we have allocated into the arena so that the arena pool knows how
    // much memory to zero out.
    arena_head_->bytes_allocated_ = ptr_ - begin_;
  }
}

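// Memory-tool layout of a single allocation (sketch):
//
//   ret                ret + bytes            ret + rounded_bytes
//   |<---- defined --->|<- red zone/padding ->|
//
// Only the `bytes` actually requested are marked defined; the red zone and any
// rounding padding stay inaccessible so overruns are reported by the tool.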
void* ArenaAllocator::AllocWithMemoryTool(size_t bytes, ArenaAllocKind kind) {
  // We mark all memory for a newly retrieved arena as inaccessible and then
  // mark only the actually allocated memory as defined. That leaves red zones
  // and padding between allocations marked as inaccessible.
  size_t rounded_bytes = RoundUp(bytes + kMemoryToolRedZoneBytes, 8);
  ArenaAllocatorStats::RecordAlloc(rounded_bytes, kind);
  uint8_t* ret;
  if (UNLIKELY(rounded_bytes > static_cast<size_t>(end_ - ptr_))) {
    ret = AllocFromNewArenaWithMemoryTool(rounded_bytes);
  } else {
    ret = ptr_;
    ptr_ += rounded_bytes;
  }
  MEMORY_TOOL_MAKE_DEFINED(ret, bytes);
  // Check that the memory is already zeroed out.
  DCHECK(std::all_of(ret, ret + bytes, [](uint8_t val) { return val == 0u; }));
  return ret;
}

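// As above, but returns a 16-byte aligned pointer. Any bytes skipped to reach
// the alignment are left inaccessible, like the red zones.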
void* ArenaAllocator::AllocWithMemoryToolAlign16(size_t bytes, ArenaAllocKind kind) {
  // We mark all memory for a newly retrieved arena as inaccessible and then
  // mark only the actually allocated memory as defined. That leaves red zones
  // and padding between allocations marked as inaccessible.
  size_t rounded_bytes = bytes + kMemoryToolRedZoneBytes;
  DCHECK_ALIGNED(rounded_bytes, 8);  // `bytes` is 16-byte aligned, red zone is 8-byte aligned.
  uintptr_t padding =
      RoundUp(reinterpret_cast<uintptr_t>(ptr_), 16) - reinterpret_cast<uintptr_t>(ptr_);
  ArenaAllocatorStats::RecordAlloc(rounded_bytes, kind);
  uint8_t* ret;
  if (UNLIKELY(padding + rounded_bytes > static_cast<size_t>(end_ - ptr_))) {
    static_assert(kArenaAlignment >= 16, "Expecting sufficient alignment for new Arena.");
    ret = AllocFromNewArenaWithMemoryTool(rounded_bytes);
  } else {
    ptr_ += padding;  // Leave padding inaccessible.
    ret = ptr_;
    ptr_ += rounded_bytes;
  }
  MEMORY_TOOL_MAKE_DEFINED(ret, bytes);
  // Check that the memory is already zeroed out.
  DCHECK(std::all_of(ret, ret + bytes, [](uint8_t val) { return val == 0u; }));
  return ret;
}

ArenaAllocator::~ArenaAllocator() {
  // Reclaim all the arenas by giving them back to the arena pool.
  UpdateBytesAllocated();
  pool_->FreeArenaChain(arena_head_);
}

void ArenaAllocator::ResetCurrentArena() {
  UpdateBytesAllocated();
  begin_ = nullptr;
  ptr_ = nullptr;
  end_ = nullptr;
}

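// Invariant: arena_head_ is the arena that the bump pointer (begin_/ptr_/end_)
// points into. When an oversized request would leave the current arena with
// more free space than the new arena, the new arena is spliced in behind the
// head instead of replacing it.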
uint8_t* ArenaAllocator::AllocFromNewArena(size_t bytes) {
  Arena* new_arena = pool_->AllocArena(std::max(arena_allocator::kArenaDefaultSize, bytes));
  DCHECK(new_arena != nullptr);
  DCHECK_LE(bytes, new_arena->Size());
  if (static_cast<size_t>(end_ - ptr_) > new_arena->Size() - bytes) {
    // The old arena has more space remaining than the new one, so keep using it.
    // This can happen when the requested size is over half of the default size.
    DCHECK(arena_head_ != nullptr);
    new_arena->bytes_allocated_ = bytes;  // UpdateBytesAllocated() on the new_arena.
    new_arena->next_ = arena_head_->next_;
    arena_head_->next_ = new_arena;
  } else {
    UpdateBytesAllocated();
    new_arena->next_ = arena_head_;
    arena_head_ = new_arena;
    // Update our internal data structures.
    begin_ = new_arena->Begin();
    DCHECK_ALIGNED(begin_, kAlignment);
    ptr_ = begin_ + bytes;
    end_ = new_arena->End();
  }
  return new_arena->Begin();
}

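// After AllocFromNewArena(), the new arena is either the head (bump pointer
// moved to it) or linked right after the old head; in either case, mark
// everything past the returned block as inaccessible.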
uint8_t* ArenaAllocator::AllocFromNewArenaWithMemoryTool(size_t bytes) {
  uint8_t* ret = AllocFromNewArena(bytes);
  uint8_t* noaccess_begin = ret + bytes;
  uint8_t* noaccess_end;
  if (ret == arena_head_->Begin()) {
    DCHECK(ptr_ - bytes == ret);
    noaccess_end = end_;
  } else {
    // We're still using the old arena but `ret` comes from a new one just after it.
    DCHECK(arena_head_->next_ != nullptr);
    DCHECK(ret == arena_head_->next_->Begin());
    DCHECK_EQ(bytes, arena_head_->next_->GetBytesAllocated());
    noaccess_end = arena_head_->next_->End();
  }
  MEMORY_TOOL_MAKE_NOACCESS(noaccess_begin, noaccess_end - noaccess_begin);
  return ret;
}

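// Fast path: check the range currently being bump-allocated from, then fall
// back to walking the whole arena chain.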
bool ArenaAllocator::Contains(const void* ptr) const {
  if (ptr >= begin_ && ptr < end_) {
    return true;
  }
  for (const Arena* cur_arena = arena_head_; cur_arena != nullptr; cur_arena = cur_arena->next_) {
    if (cur_arena->Contains(ptr)) {
      return true;
    }
  }
  return false;
}

MemStats::MemStats(const char* name,
                   const ArenaAllocatorStats* stats,
                   const Arena* first_arena,
                   ssize_t lost_bytes_adjustment)
    : name_(name),
      stats_(stats),
      first_arena_(first_arena),
      lost_bytes_adjustment_(lost_bytes_adjustment) {
}

void MemStats::Dump(std::ostream& os) const {
  os << name_ << " stats:\n";
  stats_->Dump(os, first_arena_, lost_bytes_adjustment_);
}

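// The head arena's RemainingSpace() is based on its (possibly stale)
// bytes_allocated_, while the true remaining space is end_ - ptr_; the
// difference is passed down as the lost-bytes adjustment.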
// Dump memory usage stats.
MemStats ArenaAllocator::GetMemStats() const {
  ssize_t lost_bytes_adjustment =
      (arena_head_ == nullptr) ? 0 : (end_ - ptr_) - arena_head_->RemainingSpace();
  return MemStats("ArenaAllocator", this, arena_head_, lost_bytes_adjustment);
}

}  // namespace art