/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "arena_allocator-inl.h"

#include <algorithm>
#include <cstddef>
#include <iomanip>
#include <numeric>

#include <android-base/logging.h>

#include "mman.h"

namespace art {

template <bool kCount>
const char* const ArenaAllocatorStatsImpl<kCount>::kAllocNames[] = {
  // Every name should have the same width and end with a space. Abbreviate if necessary:
  "Misc         ",
  "SwitchTbl    ",
  "SlowPaths    ",
  "GrowBitMap   ",
  "STL          ",
  "GraphBuilder ",
  "Graph        ",
  "BasicBlock   ",
  "BlockList    ",
  "RevPostOrder ",
  "LinearOrder  ",
  "Reachability ",
  "ConstantsMap ",
  "Predecessors ",
  "Successors   ",
  "Dominated    ",
  "Instruction  ",
  "CtorFenceIns ",
  "InvokeInputs ",
  "PhiInputs    ",
  "TypeCheckIns ",
  "LoopInfo     ",
  "LIBackEdges  ",
  "TryCatchInf  ",
  "UseListNode  ",
  "Environment  ",
  "EnvVRegs     ",
  "EnvLocations ",
  "LocSummary   ",
  "SsaBuilder   ",
  "MoveOperands ",
  "CodeBuffer   ",
  "StackMaps    ",
  "Optimization ",
  "GVN          ",
  "InductionVar ",
  "BCE          ",
  "DCE          ",
  "LSA          ",
  "LSE          ",
  "CFRE         ",
  "LICM         ",
  "WBE          ",
  "LoopOpt      ",
  "SsaLiveness  ",
  "SsaPhiElim   ",
  "RefTypeProp  ",
  "SelectGen    ",
  "SideEffects  ",
  "RegAllocator ",
  "RegAllocVldt ",
  "StackMapStm  ",
  "BitTableBld  ",
  "VectorNode   ",
  "CodeGen      ",
  "Assembler    ",
  "ParallelMove ",
  "GraphChecker ",
  "Verifier     ",
  "CallingConv  ",
  "CHA          ",
  "Scheduler    ",
  "Profile      ",
  "SBCloner     ",
  "Transaction  ",
};

template <bool kCount>
ArenaAllocatorStatsImpl<kCount>::ArenaAllocatorStatsImpl()
    : num_allocations_(0u),
      alloc_stats_(kNumArenaAllocKinds, 0u) {
}

template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::Copy(const ArenaAllocatorStatsImpl& other) {
  num_allocations_ = other.num_allocations_;
  std::copy_n(other.alloc_stats_.begin(), kNumArenaAllocKinds, alloc_stats_.begin());
}

template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::RecordAlloc(size_t bytes, ArenaAllocKind kind) {
  alloc_stats_[kind] += bytes;
  ++num_allocations_;
}

template <bool kCount>
size_t ArenaAllocatorStatsImpl<kCount>::NumAllocations() const {
  return num_allocations_;
}

template <bool kCount>
size_t ArenaAllocatorStatsImpl<kCount>::BytesAllocated() const {
  const size_t init = 0u;  // Initial value of the correct type.
  return std::accumulate(alloc_stats_.begin(), alloc_stats_.end(), init);
}

template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::Dump(std::ostream& os, const Arena* first,
                                           ssize_t lost_bytes_adjustment) const {
  size_t malloc_bytes = 0u;
  size_t lost_bytes = 0u;
  size_t num_arenas = 0u;
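  // In the output below, "used" is the total recorded via RecordAlloc(),
  // "allocated" is the memory obtained from the pool, and "lost" is arena
  // space never handed out (remaining tails plus the caller's adjustment).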
  for (const Arena* arena = first; arena != nullptr; arena = arena->next_) {
    malloc_bytes += arena->Size();
    lost_bytes += arena->RemainingSpace();
    ++num_arenas;
  }
  // The lost_bytes_adjustment is used to make up for the fact that the current arena
  // may not have the bytes_allocated_ updated correctly.
  lost_bytes += lost_bytes_adjustment;
  const size_t bytes_allocated = BytesAllocated();
  os << " MEM: used: " << bytes_allocated << ", allocated: " << malloc_bytes
     << ", lost: " << lost_bytes << "\n";
  size_t num_allocations = NumAllocations();
  if (num_allocations != 0) {
    os << "Number of arenas allocated: " << num_arenas << ", Number of allocations: "
       << num_allocations << ", avg size: " << bytes_allocated / num_allocations << "\n";
  }
  os << "===== Allocation by kind\n";
  static_assert(arraysize(kAllocNames) == kNumArenaAllocKinds, "arraysize of kAllocNames");
  for (int i = 0; i < kNumArenaAllocKinds; i++) {
    // Reduce output by listing only allocation kinds that actually have allocations.
    if (alloc_stats_[i] != 0u) {
      os << kAllocNames[i] << std::setw(10) << alloc_stats_[i] << "\n";
    }
  }
}

#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Winstantiation-after-specialization"
// We're going to use ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations> which needs
// to be explicitly instantiated if kArenaAllocatorCountAllocations is true. Explicit
// instantiation of the specialization ArenaAllocatorStatsImpl<false> does not do anything
// but requires the warning "-Winstantiation-after-specialization" to be turned off.
//
// To avoid bit-rot of ArenaAllocatorStatsImpl<true>, instantiate it in debug builds as well
// (but keep the unnecessary code out of release builds), as we do not usually compile with
// kArenaAllocatorCountAllocations set to true.
template class ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations || kIsDebugBuild>;
#pragma GCC diagnostic pop

void ArenaAllocatorMemoryTool::DoMakeDefined(void* ptr, size_t size) {
  MEMORY_TOOL_MAKE_DEFINED(ptr, size);
}

void ArenaAllocatorMemoryTool::DoMakeUndefined(void* ptr, size_t size) {
  MEMORY_TOOL_MAKE_UNDEFINED(ptr, size);
}

void ArenaAllocatorMemoryTool::DoMakeInaccessible(void* ptr, size_t size) {
  MEMORY_TOOL_MAKE_NOACCESS(ptr, size);
}

size_t ArenaAllocator::BytesAllocated() const {
  return ArenaAllocatorStats::BytesAllocated();
}

size_t ArenaAllocator::BytesUsed() const {
  size_t total = ptr_ - begin_;
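  // The head arena's usage is read straight off the bump pointer; the rest of
  // the chain carries the byte counts recorded by UpdateBytesAllocated().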
  if (arena_head_ != nullptr) {
    for (Arena* cur_arena = arena_head_->next_; cur_arena != nullptr;
         cur_arena = cur_arena->next_) {
      total += cur_arena->GetBytesAllocated();
    }
  }
  return total;
}

ArenaAllocator::ArenaAllocator(ArenaPool* pool)
  : pool_(pool),
    begin_(nullptr),
    end_(nullptr),
    ptr_(nullptr),
    arena_head_(nullptr) {
}
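
// Illustrative usage sketch (a hypothetical snippet, not exercised in this
// file; assumes a concrete pool type such as MallocArenaPool and the Alloc()
// entry point declared in arena_allocator.h / arena_allocator-inl.h):
//   MallocArenaPool pool;
//   ArenaAllocator allocator(&pool);
//   void* storage = allocator.Alloc(64u, kArenaAllocMisc);  // Arena memory is pre-zeroed.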

void ArenaAllocator::UpdateBytesAllocated() {
  if (arena_head_ != nullptr) {
    // Update how many bytes we have allocated into the arena so that the arena pool knows how
    // much memory to zero out.
    arena_head_->bytes_allocated_ = ptr_ - begin_;
  }
}

void* ArenaAllocator::AllocWithMemoryTool(size_t bytes, ArenaAllocKind kind) {
  // We mark all memory for a newly retrieved arena as inaccessible and then
  // mark only the actually allocated memory as defined. That leaves red zones
  // and padding between allocations marked as inaccessible.
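  // Layout of one allocation after the marking below (sketch):
  //   [ bytes, marked defined | red zone + rounding, left inaccessible ] -> next allocation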
  size_t rounded_bytes = RoundUp(bytes + kMemoryToolRedZoneBytes, 8);
  ArenaAllocatorStats::RecordAlloc(rounded_bytes, kind);
  uint8_t* ret;
  if (UNLIKELY(rounded_bytes > static_cast<size_t>(end_ - ptr_))) {
    ret = AllocFromNewArenaWithMemoryTool(rounded_bytes);
  } else {
    ret = ptr_;
    ptr_ += rounded_bytes;
  }
  MEMORY_TOOL_MAKE_DEFINED(ret, bytes);
  // Check that the memory is already zeroed out.
  DCHECK(std::all_of(ret, ret + bytes, [](uint8_t val) { return val == 0u; }));
  return ret;
}

void* ArenaAllocator::AllocWithMemoryToolAlign16(size_t bytes, ArenaAllocKind kind) {
  // We mark all memory for a newly retrieved arena as inaccessible and then
  // mark only the actually allocated memory as defined. That leaves red zones
  // and padding between allocations marked as inaccessible.
  size_t rounded_bytes = bytes + kMemoryToolRedZoneBytes;
  DCHECK_ALIGNED(rounded_bytes, 8);  // `bytes` is 16-byte aligned, red zone is 8-byte aligned.
  uintptr_t padding =
      RoundUp(reinterpret_cast<uintptr_t>(ptr_), 16) - reinterpret_cast<uintptr_t>(ptr_);
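  // `padding` is the 0-15 bytes needed to bring the bump pointer to the next
  // 16-byte boundary; it is skipped below and left inaccessible, not allocated.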
  ArenaAllocatorStats::RecordAlloc(rounded_bytes, kind);
  uint8_t* ret;
  if (UNLIKELY(padding + rounded_bytes > static_cast<size_t>(end_ - ptr_))) {
    static_assert(kArenaAlignment >= 16, "Expecting sufficient alignment for new Arena.");
    ret = AllocFromNewArenaWithMemoryTool(rounded_bytes);
  } else {
    ptr_ += padding;  // Leave padding inaccessible.
    ret = ptr_;
    ptr_ += rounded_bytes;
  }
  MEMORY_TOOL_MAKE_DEFINED(ret, bytes);
  // Check that the memory is already zeroed out.
  DCHECK(std::all_of(ret, ret + bytes, [](uint8_t val) { return val == 0u; }));
  return ret;
}

ArenaAllocator::~ArenaAllocator() {
  // Reclaim all the arenas by giving them back to the arena pool.
  UpdateBytesAllocated();
  pool_->FreeArenaChain(arena_head_);
}

void ArenaAllocator::ResetCurrentArena() {
  UpdateBytesAllocated();
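  // Detach the bump pointer; the next allocation fetches a fresh arena while
  // the existing chain stays owned by this allocator until destruction.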
  begin_ = nullptr;
  ptr_ = nullptr;
  end_ = nullptr;
}

uint8_t* ArenaAllocator::AllocFromNewArena(size_t bytes) {
  Arena* new_arena = pool_->AllocArena(std::max(arena_allocator::kArenaDefaultSize, bytes));
  DCHECK(new_arena != nullptr);
  DCHECK_LE(bytes, new_arena->Size());
  if (static_cast<size_t>(end_ - ptr_) > new_arena->Size() - bytes) {
    // The old arena has more space remaining than the new one, so keep using it.
    // This can happen when the requested size is over half of the default size.
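    // Example (assuming a 128 KiB default arena size): a 120 KiB request with
    // 30 KiB still free in the old arena yields a new arena with only 8 KiB
    // left, so the new arena is linked behind the head and the old one stays.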
    DCHECK(arena_head_ != nullptr);
    new_arena->bytes_allocated_ = bytes;  // UpdateBytesAllocated() on the new_arena.
    new_arena->next_ = arena_head_->next_;
    arena_head_->next_ = new_arena;
  } else {
    UpdateBytesAllocated();
    new_arena->next_ = arena_head_;
    arena_head_ = new_arena;
    // Update our internal data structures.
    begin_ = new_arena->Begin();
    DCHECK_ALIGNED(begin_, kAlignment);
    ptr_ = begin_ + bytes;
    end_ = new_arena->End();
  }
  return new_arena->Begin();
}

uint8_t* ArenaAllocator::AllocFromNewArenaWithMemoryTool(size_t bytes) {
  uint8_t* ret = AllocFromNewArena(bytes);
  uint8_t* noaccess_begin = ret + bytes;
  uint8_t* noaccess_end;
  if (ret == arena_head_->Begin()) {
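    // The new arena became the active head; protect everything between the
    // fresh allocation and the arena's end.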
    DCHECK(ptr_ - bytes == ret);
    noaccess_end = end_;
  } else {
    // We're still using the old arena but `ret` comes from a new one just after it.
    DCHECK(arena_head_->next_ != nullptr);
    DCHECK(ret == arena_head_->next_->Begin());
    DCHECK_EQ(bytes, arena_head_->next_->GetBytesAllocated());
    noaccess_end = arena_head_->next_->End();
  }
  MEMORY_TOOL_MAKE_NOACCESS(noaccess_begin, noaccess_end - noaccess_begin);
  return ret;
}

bool ArenaAllocator::Contains(const void* ptr) const {
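  // Fast path: check the active [begin_, end_) range before walking the chain.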
  if (ptr >= begin_ && ptr < end_) {
    return true;
  }
  for (const Arena* cur_arena = arena_head_; cur_arena != nullptr; cur_arena = cur_arena->next_) {
    if (cur_arena->Contains(ptr)) {
      return true;
    }
  }
  return false;
}

MemStats::MemStats(const char* name,
                   const ArenaAllocatorStats* stats,
                   const Arena* first_arena,
                   ssize_t lost_bytes_adjustment)
    : name_(name),
      stats_(stats),
      first_arena_(first_arena),
      lost_bytes_adjustment_(lost_bytes_adjustment) {
}

void MemStats::Dump(std::ostream& os) const {
  os << name_ << " stats:\n";
  stats_->Dump(os, first_arena_, lost_bytes_adjustment_);
}

// Dump memory usage stats.
MemStats ArenaAllocator::GetMemStats() const {
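  // The head arena's bytes_allocated_ may be stale: (end_ - ptr_) is its true
  // remaining space while RemainingSpace() uses the last recorded value, so
  // pass the difference to Dump() to correct the "lost" figure.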
  ssize_t lost_bytes_adjustment =
      (arena_head_ == nullptr) ? 0 : (end_ - ptr_) - arena_head_->RemainingSpace();
  return MemStats("ArenaAllocator", this, arena_head_, lost_bytes_adjustment);
}

}  // namespace art