/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "arena_allocator-inl.h"

#include <algorithm>
#include <cstddef>
#include <iomanip>
#include <numeric>

#include <android-base/logging.h>

#include "mman.h"

namespace art {

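// Number of bytes reserved after each allocation as a memory-tool red zone. The
// red zone stays inaccessible so that tools such as ASan can detect overruns;
// see AllocWithMemoryTool() below.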
constexpr size_t kMemoryToolRedZoneBytes = 8;

template <bool kCount>
const char* const ArenaAllocatorStatsImpl<kCount>::kAllocNames[] = {
  // Every name should have the same width and end with a space. Abbreviate if necessary:
  "Misc         ",
  "SwitchTbl    ",
  "SlowPaths    ",
  "GrowBitMap   ",
  "STL          ",
  "GraphBuilder ",
  "Graph        ",
  "BasicBlock   ",
  "BlockList    ",
  "RevPostOrder ",
  "LinearOrder  ",
  "Reachability ",
  "ConstantsMap ",
  "Predecessors ",
  "Successors   ",
  "Dominated    ",
  "Instruction  ",
  "CtorFenceIns ",
  "InvokeInputs ",
  "PhiInputs    ",
  "TypeCheckIns ",
  "LoopInfo     ",
  "LIBackEdges  ",
  "TryCatchInf  ",
  "UseListNode  ",
  "Environment  ",
  "EnvVRegs     ",
  "EnvLocations ",
  "LocSummary   ",
  "SsaBuilder   ",
  "MoveOperands ",
  "CodeBuffer   ",
  "StackMaps    ",
  "Optimization ",
  "GVN          ",
  "InductionVar ",
  "BCE          ",
  "DCE          ",
  "LSA          ",
  "LSE          ",
  "CFRE         ",
  "LICM         ",
  "LoopOpt      ",
  "SsaLiveness  ",
  "SsaPhiElim   ",
  "RefTypeProp  ",
  "SelectGen    ",
  "SideEffects  ",
  "RegAllocator ",
  "RegAllocVldt ",
  "StackMapStm  ",
  "BitTableBld  ",
  "VectorNode   ",
  "CodeGen      ",
  "Assembler    ",
  "ParallelMove ",
  "GraphChecker ",
  "Verifier     ",
  "CallingConv  ",
  "CHA          ",
  "Scheduler    ",
  "Profile      ",
  "SBCloner     ",
  "Transaction  ",
};

template <bool kCount>
ArenaAllocatorStatsImpl<kCount>::ArenaAllocatorStatsImpl()
    : num_allocations_(0u),
      alloc_stats_(kNumArenaAllocKinds, 0u) {
}

template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::Copy(const ArenaAllocatorStatsImpl& other) {
  num_allocations_ = other.num_allocations_;
  std::copy_n(other.alloc_stats_.begin(), kNumArenaAllocKinds, alloc_stats_.begin());
}

template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::RecordAlloc(size_t bytes, ArenaAllocKind kind) {
  alloc_stats_[kind] += bytes;
  ++num_allocations_;
}

template <bool kCount>
size_t ArenaAllocatorStatsImpl<kCount>::NumAllocations() const {
  return num_allocations_;
}

template <bool kCount>
size_t ArenaAllocatorStatsImpl<kCount>::BytesAllocated() const {
  const size_t init = 0u;  // Initial value of the correct type.
  return std::accumulate(alloc_stats_.begin(), alloc_stats_.end(), init);
}

template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::Dump(std::ostream& os, const Arena* first,
                                           ssize_t lost_bytes_adjustment) const {
  size_t malloc_bytes = 0u;
  size_t lost_bytes = 0u;
  size_t num_arenas = 0u;
  for (const Arena* arena = first; arena != nullptr; arena = arena->next_) {
    malloc_bytes += arena->Size();
    lost_bytes += arena->RemainingSpace();
    ++num_arenas;
  }
  // The lost_bytes_adjustment is used to make up for the fact that the current arena
  // may not have the bytes_allocated_ updated correctly.
  lost_bytes += lost_bytes_adjustment;
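  // Report three figures: "used" is the sum of the recorded allocation sizes,
  // "allocated" is the total memory acquired for all arenas, and "lost" is the
  // allocated-but-unused remainder.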
  const size_t bytes_allocated = BytesAllocated();
  os << " MEM: used: " << bytes_allocated << ", allocated: " << malloc_bytes
     << ", lost: " << lost_bytes << "\n";
  size_t num_allocations = NumAllocations();
  if (num_allocations != 0) {
    os << "Number of arenas allocated: " << num_arenas << ", Number of allocations: "
       << num_allocations << ", avg size: " << bytes_allocated / num_allocations << "\n";
  }
  os << "===== Allocation by kind\n";
  static_assert(arraysize(kAllocNames) == kNumArenaAllocKinds, "arraysize of kAllocNames");
  for (int i = 0; i < kNumArenaAllocKinds; i++) {
    // Reduce output by listing only allocation kinds that actually have allocations.
    if (alloc_stats_[i] != 0u) {
      os << kAllocNames[i] << std::setw(10) << alloc_stats_[i] << "\n";
    }
  }
}

#pragma GCC diagnostic push
#if __clang_major__ >= 4
#pragma GCC diagnostic ignored "-Winstantiation-after-specialization"
#endif
// We're going to use ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations> which needs
// to be explicitly instantiated if kArenaAllocatorCountAllocations is true. Explicit
// instantiation of the specialization ArenaAllocatorStatsImpl<false> does not do anything
// but requires the warning "-Winstantiation-after-specialization" to be turned off.
//
// To avoid bit-rot of the ArenaAllocatorStatsImpl<true>, instantiate it also in debug builds
// (but keep the unnecessary code out of release builds) as we do not usually compile with
// kArenaAllocatorCountAllocations set to true.
template class ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations || kIsDebugBuild>;
#pragma GCC diagnostic pop

void ArenaAllocatorMemoryTool::DoMakeDefined(void* ptr, size_t size) {
  MEMORY_TOOL_MAKE_DEFINED(ptr, size);
}

void ArenaAllocatorMemoryTool::DoMakeUndefined(void* ptr, size_t size) {
  MEMORY_TOOL_MAKE_UNDEFINED(ptr, size);
}

void ArenaAllocatorMemoryTool::DoMakeInaccessible(void* ptr, size_t size) {
  MEMORY_TOOL_MAKE_NOACCESS(ptr, size);
}

Arena::Arena() : bytes_allocated_(0), memory_(nullptr), size_(0), next_(nullptr) {
}

size_t ArenaAllocator::BytesAllocated() const {
  return ArenaAllocatorStats::BytesAllocated();
}

size_t ArenaAllocator::BytesUsed() const {
  size_t total = ptr_ - begin_;
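  // `ptr_` tracks usage in the currently active (head) arena; arenas further
  // down the chain record their usage in `bytes_allocated_`.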
  if (arena_head_ != nullptr) {
    for (Arena* cur_arena = arena_head_->next_; cur_arena != nullptr;
         cur_arena = cur_arena->next_) {
      total += cur_arena->GetBytesAllocated();
    }
  }
  return total;
}

ArenaAllocator::ArenaAllocator(ArenaPool* pool)
    : pool_(pool),
      begin_(nullptr),
      end_(nullptr),
      ptr_(nullptr),
      arena_head_(nullptr) {
}

void ArenaAllocator::UpdateBytesAllocated() {
  if (arena_head_ != nullptr) {
    // Update how many bytes we have allocated into the arena so that the arena pool knows how
    // much memory to zero out.
    arena_head_->bytes_allocated_ = ptr_ - begin_;
  }
}

void* ArenaAllocator::AllocWithMemoryTool(size_t bytes, ArenaAllocKind kind) {
  // We mark all memory for a newly retrieved arena as inaccessible and then
  // mark only the actually allocated memory as defined. That leaves red zones
  // and padding between allocations marked as inaccessible.
  size_t rounded_bytes = RoundUp(bytes + kMemoryToolRedZoneBytes, 8);
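  // For example, a 20-byte request becomes 20 + 8 = 28 bytes, rounded up to 32;
  // only the first 20 bytes are made defined below, so the trailing 12 bytes
  // stay inaccessible as red zone and padding.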
  ArenaAllocatorStats::RecordAlloc(rounded_bytes, kind);
  uint8_t* ret;
  if (UNLIKELY(rounded_bytes > static_cast<size_t>(end_ - ptr_))) {
    ret = AllocFromNewArenaWithMemoryTool(rounded_bytes);
  } else {
    ret = ptr_;
    ptr_ += rounded_bytes;
  }
  MEMORY_TOOL_MAKE_DEFINED(ret, bytes);
  // Check that the memory is already zeroed out.
  DCHECK(std::all_of(ret, ret + bytes, [](uint8_t val) { return val == 0u; }));
  return ret;
}

void* ArenaAllocator::AllocWithMemoryToolAlign16(size_t bytes, ArenaAllocKind kind) {
  // We mark all memory for a newly retrieved arena as inaccessible and then
  // mark only the actually allocated memory as defined. That leaves red zones
  // and padding between allocations marked as inaccessible.
  size_t rounded_bytes = bytes + kMemoryToolRedZoneBytes;
  DCHECK_ALIGNED(rounded_bytes, 8);  // `bytes` is 16-byte aligned, red zone is 8-byte aligned.
  uintptr_t padding =
      RoundUp(reinterpret_cast<uintptr_t>(ptr_), 16) - reinterpret_cast<uintptr_t>(ptr_);
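  // E.g. if ptr_ ends in 0x8, rounding up to 16 yields padding == 8; if ptr_ is
  // already 16-byte aligned, padding == 0.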
  ArenaAllocatorStats::RecordAlloc(rounded_bytes, kind);
  uint8_t* ret;
  if (UNLIKELY(padding + rounded_bytes > static_cast<size_t>(end_ - ptr_))) {
    static_assert(kArenaAlignment >= 16, "Expecting sufficient alignment for new Arena.");
    ret = AllocFromNewArenaWithMemoryTool(rounded_bytes);
  } else {
    ptr_ += padding;  // Leave padding inaccessible.
    ret = ptr_;
    ptr_ += rounded_bytes;
  }
  MEMORY_TOOL_MAKE_DEFINED(ret, bytes);
  // Check that the memory is already zeroed out.
  DCHECK(std::all_of(ret, ret + bytes, [](uint8_t val) { return val == 0u; }));
  return ret;
}

ArenaAllocator::~ArenaAllocator() {
  // Reclaim all the arenas by giving them back to the arena pool.
  UpdateBytesAllocated();
  pool_->FreeArenaChain(arena_head_);
}

uint8_t* ArenaAllocator::AllocFromNewArena(size_t bytes) {
  Arena* new_arena = pool_->AllocArena(std::max(arena_allocator::kArenaDefaultSize, bytes));
  DCHECK(new_arena != nullptr);
  DCHECK_LE(bytes, new_arena->Size());
  if (static_cast<size_t>(end_ - ptr_) > new_arena->Size() - bytes) {
    // The old arena has more space remaining than the new one, so keep using it.
    // This can happen when the requested size is over half of the default size.
    DCHECK(arena_head_ != nullptr);
    new_arena->bytes_allocated_ = bytes;  // UpdateBytesAllocated() on the new_arena.
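    // Link the new arena in right behind the head; the bump-pointer state
    // (begin_/ptr_/end_) keeps pointing into the old head arena.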
    new_arena->next_ = arena_head_->next_;
    arena_head_->next_ = new_arena;
  } else {
    UpdateBytesAllocated();
    new_arena->next_ = arena_head_;
    arena_head_ = new_arena;
    // Update our internal data structures.
    begin_ = new_arena->Begin();
    DCHECK_ALIGNED(begin_, kAlignment);
    ptr_ = begin_ + bytes;
    end_ = new_arena->End();
  }
  return new_arena->Begin();
}

uint8_t* ArenaAllocator::AllocFromNewArenaWithMemoryTool(size_t bytes) {
  uint8_t* ret = AllocFromNewArena(bytes);
  uint8_t* noaccess_begin = ret + bytes;
  uint8_t* noaccess_end;
  if (ret == arena_head_->Begin()) {
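    // The new arena became the allocation head; protect everything from the
    // end of this allocation up to the arena's end.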
    DCHECK(ptr_ - bytes == ret);
    noaccess_end = end_;
  } else {
    // We're still using the old arena but `ret` comes from a new one just after it.
    DCHECK(arena_head_->next_ != nullptr);
    DCHECK(ret == arena_head_->next_->Begin());
    DCHECK_EQ(bytes, arena_head_->next_->GetBytesAllocated());
    noaccess_end = arena_head_->next_->End();
  }
  MEMORY_TOOL_MAKE_NOACCESS(noaccess_begin, noaccess_end - noaccess_begin);
  return ret;
}

bool ArenaAllocator::Contains(const void* ptr) const {
  if (ptr >= begin_ && ptr < end_) {
    return true;
  }
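  // Not in the active arena's current range; fall back to scanning the whole chain.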
  for (const Arena* cur_arena = arena_head_; cur_arena != nullptr; cur_arena = cur_arena->next_) {
    if (cur_arena->Contains(ptr)) {
      return true;
    }
  }
  return false;
}

MemStats::MemStats(const char* name,
                   const ArenaAllocatorStats* stats,
                   const Arena* first_arena,
                   ssize_t lost_bytes_adjustment)
    : name_(name),
      stats_(stats),
      first_arena_(first_arena),
      lost_bytes_adjustment_(lost_bytes_adjustment) {
}

void MemStats::Dump(std::ostream& os) const {
  os << name_ << " stats:\n";
  stats_->Dump(os, first_arena_, lost_bytes_adjustment_);
}

// Dump memory usage stats.
MemStats ArenaAllocator::GetMemStats() const {
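  // The head arena's bytes_allocated_ may be stale, so pass the difference
  // between the actual remaining space (end_ - ptr_) and the arena's recorded
  // remaining space as an adjustment for the "lost" bytes.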
  ssize_t lost_bytes_adjustment =
      (arena_head_ == nullptr) ? 0 : (end_ - ptr_) - arena_head_->RemainingSpace();
  return MemStats("ArenaAllocator", this, arena_head_, lost_bytes_adjustment);
}

}  // namespace art