/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_LIBARTBASE_BASE_ARENA_ALLOCATOR_H_
#define ART_LIBARTBASE_BASE_ARENA_ALLOCATOR_H_

#include <stddef.h>
#include <stdint.h>
#include <string.h>  // For memcpy() in Realloc().

#include "bit_utils.h"
#include "debug_stack.h"
#include "dchecked_vector.h"
#include "macros.h"
#include "memory_tool.h"

namespace art {

class Arena;
class ArenaPool;
class ArenaAllocator;
class ArenaStack;
class ScopedArenaAllocator;
class MemStats;

template <typename T>
class ArenaAllocatorAdapter;

static constexpr bool kArenaAllocatorCountAllocations = false;

// Type of allocation for memory tuning.
enum ArenaAllocKind {
  kArenaAllocMisc,
  kArenaAllocSwitchTable,
  kArenaAllocSlowPaths,
  kArenaAllocGrowableBitMap,
  kArenaAllocSTL,
  kArenaAllocGraphBuilder,
  kArenaAllocGraph,
  kArenaAllocBasicBlock,
  kArenaAllocBlockList,
  kArenaAllocReversePostOrder,
  kArenaAllocLinearOrder,
  kArenaAllocReachabilityGraph,
  kArenaAllocConstantsMap,
  kArenaAllocPredecessors,
  kArenaAllocSuccessors,
  kArenaAllocDominated,
  kArenaAllocInstruction,
  kArenaAllocConstructorFenceInputs,
  kArenaAllocInvokeInputs,
  kArenaAllocPhiInputs,
  kArenaAllocTypeCheckInputs,
  kArenaAllocLoopInfo,
  kArenaAllocLoopInfoBackEdges,
  kArenaAllocTryCatchInfo,
  kArenaAllocUseListNode,
  kArenaAllocEnvironment,
  kArenaAllocEnvironmentVRegs,
  kArenaAllocEnvironmentLocations,
  kArenaAllocLocationSummary,
  kArenaAllocSsaBuilder,
  kArenaAllocMoveOperands,
  kArenaAllocCodeBuffer,
  kArenaAllocStackMaps,
  kArenaAllocOptimization,
  kArenaAllocGvn,
  kArenaAllocInductionVarAnalysis,
  kArenaAllocBoundsCheckElimination,
  kArenaAllocDCE,
  kArenaAllocLSA,
  kArenaAllocLSE,
  kArenaAllocCFRE,
  kArenaAllocLICM,
  kArenaAllocLoopOptimization,
  kArenaAllocSsaLiveness,
  kArenaAllocSsaPhiElimination,
  kArenaAllocReferenceTypePropagation,
  kArenaAllocSelectGenerator,
  kArenaAllocSideEffectsAnalysis,
  kArenaAllocRegisterAllocator,
  kArenaAllocRegisterAllocatorValidate,
  kArenaAllocStackMapStream,
  kArenaAllocBitTableBuilder,
  kArenaAllocVectorNode,
  kArenaAllocCodeGenerator,
  kArenaAllocAssembler,
  kArenaAllocParallelMoveResolver,
  kArenaAllocGraphChecker,
  kArenaAllocVerifier,
  kArenaAllocCallingConvention,
  kArenaAllocCHA,
  kArenaAllocScheduler,
  kArenaAllocProfile,
  kArenaAllocSuperblockCloner,
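  // Not a real allocation kind: the count of entries above, used to size per-kind stats tables.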
  kNumArenaAllocKinds
};

template <bool kCount>
class ArenaAllocatorStatsImpl;

template <>
class ArenaAllocatorStatsImpl<false> {
 public:
  ArenaAllocatorStatsImpl() = default;
  ArenaAllocatorStatsImpl(const ArenaAllocatorStatsImpl& other) = default;
  ArenaAllocatorStatsImpl& operator = (const ArenaAllocatorStatsImpl& other) = delete;

  void Copy(const ArenaAllocatorStatsImpl& other ATTRIBUTE_UNUSED) {}
  void RecordAlloc(size_t bytes ATTRIBUTE_UNUSED, ArenaAllocKind kind ATTRIBUTE_UNUSED) {}
  size_t NumAllocations() const { return 0u; }
  size_t BytesAllocated() const { return 0u; }
  void Dump(std::ostream& os ATTRIBUTE_UNUSED,
            const Arena* first ATTRIBUTE_UNUSED,
            ssize_t lost_bytes_adjustment ATTRIBUTE_UNUSED) const {}
};

template <bool kCount>
class ArenaAllocatorStatsImpl {
 public:
  ArenaAllocatorStatsImpl();
  ArenaAllocatorStatsImpl(const ArenaAllocatorStatsImpl& other) = default;
  ArenaAllocatorStatsImpl& operator = (const ArenaAllocatorStatsImpl& other) = delete;

  void Copy(const ArenaAllocatorStatsImpl& other);
  void RecordAlloc(size_t bytes, ArenaAllocKind kind);
  size_t NumAllocations() const;
  size_t BytesAllocated() const;
  void Dump(std::ostream& os, const Arena* first, ssize_t lost_bytes_adjustment) const;

 private:
  size_t num_allocations_;
  dchecked_vector<size_t> alloc_stats_;  // Bytes used by various allocation kinds.

  static const char* const kAllocNames[];
};

typedef ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations> ArenaAllocatorStats;
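// Note: with kArenaAllocatorCountAllocations == false (the default above), this typedef selects
// the empty <false> specialization, so the statistics calls compile away to nothing.

// Wraps the memory-tool hooks from memory_tool.h: when a memory tool (e.g. AddressSanitizer) is
// available, these calls mark arena memory defined, undefined, or inaccessible; otherwise they
// reduce to no-ops.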
class ArenaAllocatorMemoryTool {
 public:
  bool IsRunningOnMemoryTool() { return kMemoryToolIsAvailable; }

  void MakeDefined(void* ptr, size_t size) {
    if (UNLIKELY(IsRunningOnMemoryTool())) {
      DoMakeDefined(ptr, size);
    }
  }
  void MakeUndefined(void* ptr, size_t size) {
    if (UNLIKELY(IsRunningOnMemoryTool())) {
      DoMakeUndefined(ptr, size);
    }
  }
  void MakeInaccessible(void* ptr, size_t size) {
    if (UNLIKELY(IsRunningOnMemoryTool())) {
      DoMakeInaccessible(ptr, size);
    }
  }

 private:
  void DoMakeDefined(void* ptr, size_t size);
  void DoMakeUndefined(void* ptr, size_t size);
  void DoMakeInaccessible(void* ptr, size_t size);
};

class Arena {
 public:
  Arena();
  virtual ~Arena() { }
  // Reset prepares the arena for reuse; it zeroes the memory with memset for performance.
  void Reset();
  // Release is used in between uses; it returns pages to the OS via madvise to reduce memory usage.
  virtual void Release() { }
  uint8_t* Begin() {
    return memory_;
  }

  uint8_t* End() {
    return memory_ + size_;
  }

  size_t Size() const {
    return size_;
  }

  size_t RemainingSpace() const {
    return Size() - bytes_allocated_;
  }

  size_t GetBytesAllocated() const {
    return bytes_allocated_;
  }

  // Return true if ptr is contained in the arena.
  bool Contains(const void* ptr) const {
    return memory_ <= ptr && ptr < memory_ + bytes_allocated_;
  }

 protected:
  size_t bytes_allocated_;
  uint8_t* memory_;
  size_t size_;
  Arena* next_;
  friend class MallocArenaPool;
  friend class MemMapArenaPool;
  friend class ArenaAllocator;
  friend class ArenaStack;
  friend class ScopedArenaAllocator;
  template <bool kCount> friend class ArenaAllocatorStatsImpl;

  friend class ArenaAllocatorTest;

 private:
  DISALLOW_COPY_AND_ASSIGN(Arena);
};

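// Abstract interface for a pool of reusable arenas. Concrete pools such as MallocArenaPool and
// MemMapArenaPool (befriended by Arena above) decide how arena memory is backed and recycled.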
class ArenaPool {
 public:
  virtual ~ArenaPool() = default;

  virtual Arena* AllocArena(size_t size) = 0;
  virtual void FreeArenaChain(Arena* first) = 0;
  virtual size_t GetBytesAllocated() const = 0;
  virtual void ReclaimMemory() = 0;
  virtual void LockReclaimMemory() = 0;
  // Trim the arena maps with madvise; used by the JIT to reduce memory usage.
  virtual void TrimMaps() = 0;

 protected:
  ArenaPool() = default;

 private:
  DISALLOW_COPY_AND_ASSIGN(ArenaPool);
};

// Fast single-threaded allocator for zero-initialized memory chunks.
//
// Memory is allocated from ArenaPool in large chunks and then rationed through
// the ArenaAllocator. It's returned to the ArenaPool only when the ArenaAllocator
// is destroyed.
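//
// Example usage (an illustrative sketch: MallocArenaPool, declared in malloc_arena_pool.h, is
// one concrete pool; `MyNode` stands in for any hypothetical trivially-constructible type):
//
//   MallocArenaPool pool;
//   {
//     ArenaAllocator allocator(&pool);
//     int32_t* counts = allocator.AllocArray<int32_t>(64, kArenaAllocMisc);  // Zeroed.
//     MyNode* node = allocator.Alloc<MyNode>();
//     // ... use the memory; there are no individual frees ...
//   }  // All arenas go back to `pool` when `allocator` is destroyed.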
class ArenaAllocator
    : private DebugStackRefCounter, private ArenaAllocatorStats, private ArenaAllocatorMemoryTool {
 public:
  explicit ArenaAllocator(ArenaPool* pool);
  ~ArenaAllocator();

  using ArenaAllocatorMemoryTool::IsRunningOnMemoryTool;
  using ArenaAllocatorMemoryTool::MakeDefined;
  using ArenaAllocatorMemoryTool::MakeUndefined;
  using ArenaAllocatorMemoryTool::MakeInaccessible;

  // Get adapter for use in STL containers. See arena_containers.h.
  ArenaAllocatorAdapter<void> Adapter(ArenaAllocKind kind = kArenaAllocSTL);

  // Returns zeroed memory.
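  // The fast path below just bumps ptr_ by the rounded-up size; when the current arena cannot
  // fit the request, AllocFromNewArena() acquires a fresh arena from the pool.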
  void* Alloc(size_t bytes, ArenaAllocKind kind = kArenaAllocMisc) ALWAYS_INLINE {
    if (UNLIKELY(IsRunningOnMemoryTool())) {
      return AllocWithMemoryTool(bytes, kind);
    }
    bytes = RoundUp(bytes, kAlignment);
    ArenaAllocatorStats::RecordAlloc(bytes, kind);
    if (UNLIKELY(bytes > static_cast<size_t>(end_ - ptr_))) {
      return AllocFromNewArena(bytes);
    }
    uint8_t* ret = ptr_;
    DCHECK_ALIGNED(ret, kAlignment);
    ptr_ += bytes;
    return ret;
  }

  // Returns zeroed memory.
  void* AllocAlign16(size_t bytes, ArenaAllocKind kind = kArenaAllocMisc) ALWAYS_INLINE {
    // It is an error to request 16-byte aligned allocation of unaligned size.
    DCHECK_ALIGNED(bytes, 16);
    if (UNLIKELY(IsRunningOnMemoryTool())) {
      return AllocWithMemoryToolAlign16(bytes, kind);
    }
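    // Pad ptr_ up to the next 16-byte boundary; the skipped padding bytes stay unused.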
    uintptr_t padding =
        RoundUp(reinterpret_cast<uintptr_t>(ptr_), 16u) - reinterpret_cast<uintptr_t>(ptr_);
    ArenaAllocatorStats::RecordAlloc(bytes, kind);
    if (UNLIKELY(padding + bytes > static_cast<size_t>(end_ - ptr_))) {
      static_assert(kArenaAlignment >= 16, "Expecting sufficient alignment for new Arena.");
      return AllocFromNewArena(bytes);
    }
    ptr_ += padding;
    uint8_t* ret = ptr_;
    DCHECK_ALIGNED(ret, 16);
    ptr_ += bytes;
    return ret;
  }

  // Realloc never frees the input pointer; it is the caller's job to do this if necessary.
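  // If `ptr` is the most recent allocation, it is extended in place when the current arena has
  // enough remaining space; otherwise new memory is allocated and the old contents copied over.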
  void* Realloc(void* ptr,
                size_t ptr_size,
                size_t new_size,
                ArenaAllocKind kind = kArenaAllocMisc) ALWAYS_INLINE {
    DCHECK_GE(new_size, ptr_size);
    DCHECK_EQ(ptr == nullptr, ptr_size == 0u);
    // We always allocate aligned.
    const size_t aligned_ptr_size = RoundUp(ptr_size, kAlignment);
    auto* end = reinterpret_cast<uint8_t*>(ptr) + aligned_ptr_size;
    // If we haven't allocated anything else, we can safely extend.
    if (end == ptr_) {
      // Red zone prevents end == ptr_ (unless input = allocator state = null).
      DCHECK(!IsRunningOnMemoryTool() || ptr_ == nullptr);
      const size_t aligned_new_size = RoundUp(new_size, kAlignment);
      const size_t size_delta = aligned_new_size - aligned_ptr_size;
      // Check the remaining space.
      const size_t remain = end_ - ptr_;
      if (remain >= size_delta) {
        ptr_ += size_delta;
        ArenaAllocatorStats::RecordAlloc(size_delta, kind);
        DCHECK_ALIGNED(ptr_, kAlignment);
        return ptr;
      }
    }
    auto* new_ptr = Alloc(new_size, kind);  // Note: Alloc will take care of aligning new_size.
    memcpy(new_ptr, ptr, ptr_size);
    // TODO: Call free on ptr if linear alloc supports free.
    return new_ptr;
  }

  template <typename T>
  T* Alloc(ArenaAllocKind kind = kArenaAllocMisc) {
    return AllocArray<T>(1, kind);
  }

  template <typename T>
  T* AllocArray(size_t length, ArenaAllocKind kind = kArenaAllocMisc) {
    return static_cast<T*>(Alloc(length * sizeof(T), kind));
  }

  size_t BytesAllocated() const;

  MemStats GetMemStats() const;

  // The BytesUsed method sums up bytes allocated from arenas in arena_head_ and nodes.
  // TODO: Change BytesAllocated to this behavior?
  size_t BytesUsed() const;

  ArenaPool* GetArenaPool() const {
    return pool_;
  }

  bool Contains(const void* ptr) const;

  // The alignment guaranteed for individual allocations.
  static constexpr size_t kAlignment = 8u;

  // The alignment required for the whole Arena rather than individual allocations.
  static constexpr size_t kArenaAlignment = 16u;

 private:
  void* AllocWithMemoryTool(size_t bytes, ArenaAllocKind kind);
  void* AllocWithMemoryToolAlign16(size_t bytes, ArenaAllocKind kind);
  uint8_t* AllocFromNewArena(size_t bytes);
  uint8_t* AllocFromNewArenaWithMemoryTool(size_t bytes);

  void UpdateBytesAllocated();

  ArenaPool* pool_;
  uint8_t* begin_;
  uint8_t* end_;
  uint8_t* ptr_;
  Arena* arena_head_;

  template <typename U>
  friend class ArenaAllocatorAdapter;

  friend class ArenaAllocatorTest;

  DISALLOW_COPY_AND_ASSIGN(ArenaAllocator);
};  // ArenaAllocator

class MemStats {
 public:
  MemStats(const char* name,
           const ArenaAllocatorStats* stats,
           const Arena* first_arena,
           ssize_t lost_bytes_adjustment = 0);
  void Dump(std::ostream& os) const;

 private:
  const char* const name_;
  const ArenaAllocatorStats* const stats_;
  const Arena* const first_arena_;
  const ssize_t lost_bytes_adjustment_;
};  // MemStats

}  // namespace art

#endif  // ART_LIBARTBASE_BASE_ARENA_ALLOCATOR_H_