/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_LIBARTBASE_BASE_ARENA_ALLOCATOR_H_
#define ART_LIBARTBASE_BASE_ARENA_ALLOCATOR_H_

#include <stddef.h>
#include <stdint.h>

#include "bit_utils.h"
#include "debug_stack.h"
#include "dchecked_vector.h"
#include "macros.h"
#include "memory_tool.h"

namespace art {

class Arena;
class ArenaPool;
class ArenaAllocator;
class ArenaStack;
class ScopedArenaAllocator;
class MemStats;

template <typename T>
class ArenaAllocatorAdapter;

static constexpr bool kArenaAllocatorCountAllocations = false;

// Type of allocation for memory tuning.
enum ArenaAllocKind {
  kArenaAllocMisc,
  kArenaAllocSwitchTable,
  kArenaAllocSlowPaths,
  kArenaAllocGrowableBitMap,
  kArenaAllocSTL,
  kArenaAllocGraphBuilder,
  kArenaAllocGraph,
  kArenaAllocBasicBlock,
  kArenaAllocBlockList,
  kArenaAllocReversePostOrder,
  kArenaAllocLinearOrder,
  kArenaAllocConstantsMap,
  kArenaAllocPredecessors,
  kArenaAllocSuccessors,
  kArenaAllocDominated,
  kArenaAllocInstruction,
  kArenaAllocConstructorFenceInputs,
  kArenaAllocInvokeInputs,
  kArenaAllocPhiInputs,
  kArenaAllocTypeCheckInputs,
  kArenaAllocLoopInfo,
  kArenaAllocLoopInfoBackEdges,
  kArenaAllocTryCatchInfo,
  kArenaAllocUseListNode,
  kArenaAllocEnvironment,
  kArenaAllocEnvironmentVRegs,
  kArenaAllocEnvironmentLocations,
  kArenaAllocLocationSummary,
  kArenaAllocSsaBuilder,
  kArenaAllocMoveOperands,
  kArenaAllocCodeBuffer,
  kArenaAllocStackMaps,
  kArenaAllocOptimization,
  kArenaAllocGvn,
  kArenaAllocInductionVarAnalysis,
  kArenaAllocBoundsCheckElimination,
  kArenaAllocDCE,
  kArenaAllocLSA,
  kArenaAllocLSE,
  kArenaAllocCFRE,
  kArenaAllocLICM,
  kArenaAllocLoopOptimization,
  kArenaAllocSsaLiveness,
  kArenaAllocSsaPhiElimination,
  kArenaAllocReferenceTypePropagation,
  kArenaAllocSelectGenerator,
  kArenaAllocSideEffectsAnalysis,
  kArenaAllocRegisterAllocator,
  kArenaAllocRegisterAllocatorValidate,
  kArenaAllocStackMapStream,
  kArenaAllocBitTableBuilder,
  kArenaAllocVectorNode,
  kArenaAllocCodeGenerator,
  kArenaAllocAssembler,
  kArenaAllocParallelMoveResolver,
  kArenaAllocGraphChecker,
  kArenaAllocVerifier,
  kArenaAllocCallingConvention,
  kArenaAllocCHA,
  kArenaAllocScheduler,
  kArenaAllocProfile,
  kArenaAllocSuperblockCloner,
  kNumArenaAllocKinds
};

template <bool kCount>
class ArenaAllocatorStatsImpl;

template <>
class ArenaAllocatorStatsImpl<false> {
 public:
  ArenaAllocatorStatsImpl() = default;
  ArenaAllocatorStatsImpl(const ArenaAllocatorStatsImpl& other) = default;
  ArenaAllocatorStatsImpl& operator = (const ArenaAllocatorStatsImpl& other) = delete;

  void Copy(const ArenaAllocatorStatsImpl& other ATTRIBUTE_UNUSED) {}
  void RecordAlloc(size_t bytes ATTRIBUTE_UNUSED, ArenaAllocKind kind ATTRIBUTE_UNUSED) {}
  size_t NumAllocations() const { return 0u; }
  size_t BytesAllocated() const { return 0u; }
  void Dump(std::ostream& os ATTRIBUTE_UNUSED,
            const Arena* first ATTRIBUTE_UNUSED,
            ssize_t lost_bytes_adjustment ATTRIBUTE_UNUSED) const {}
};

template <bool kCount>
class ArenaAllocatorStatsImpl {
 public:
  ArenaAllocatorStatsImpl();
  ArenaAllocatorStatsImpl(const ArenaAllocatorStatsImpl& other) = default;
  ArenaAllocatorStatsImpl& operator = (const ArenaAllocatorStatsImpl& other) = delete;

  void Copy(const ArenaAllocatorStatsImpl& other);
  void RecordAlloc(size_t bytes, ArenaAllocKind kind);
  size_t NumAllocations() const;
  size_t BytesAllocated() const;
  void Dump(std::ostream& os, const Arena* first, ssize_t lost_bytes_adjustment) const;

 private:
  size_t num_allocations_;
  dchecked_vector<size_t> alloc_stats_;  // Bytes used by various allocation kinds.

  static const char* const kAllocNames[];
};

typedef ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations> ArenaAllocatorStats;
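
// When kArenaAllocatorCountAllocations is false (the default above), ArenaAllocatorStats
// resolves to the empty <false> specialization and all stats calls compile away.
// Illustrative sketch only (not part of this header's API), assuming counting is enabled:
//
//   ArenaAllocatorStatsImpl<true> stats;
//   stats.RecordAlloc(128u, kArenaAllocGraph);   // Tallies 128 bytes against kArenaAllocGraph.
//   size_t n = stats.NumAllocations();           // 1 with counting on, always 0 when disabled.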

class ArenaAllocatorMemoryTool {
 public:
  bool IsRunningOnMemoryTool() { return kMemoryToolIsAvailable; }

  void MakeDefined(void* ptr, size_t size) {
    if (UNLIKELY(IsRunningOnMemoryTool())) {
      DoMakeDefined(ptr, size);
    }
  }
  void MakeUndefined(void* ptr, size_t size) {
    if (UNLIKELY(IsRunningOnMemoryTool())) {
      DoMakeUndefined(ptr, size);
    }
  }
  void MakeInaccessible(void* ptr, size_t size) {
    if (UNLIKELY(IsRunningOnMemoryTool())) {
      DoMakeInaccessible(ptr, size);
    }
  }

 private:
  void DoMakeDefined(void* ptr, size_t size);
  void DoMakeUndefined(void* ptr, size_t size);
  void DoMakeInaccessible(void* ptr, size_t size);
};
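
// Sketch of how the hooks above are typically used around arena memory (illustrative only;
// the memory-tool allocation paths such as AllocWithMemoryTool() build on these hooks):
//
//   uint8_t* p = ...;            // Chunk handed out to a client.
//   MakeDefined(p, size);        // Client may read and write [p, p + size).
//   MakeInaccessible(p, size);   // ASan/Valgrind now flags any touch of [p, p + size).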

class Arena {
 public:
  Arena();
  virtual ~Arena() { }
  // Reset is called before reuse and zeroes used memory with memset for performance.
  void Reset();
  // Release is used in between uses and uses madvise to reduce memory usage.
  virtual void Release() { }
  uint8_t* Begin() {
    return memory_;
  }

  uint8_t* End() {
    return memory_ + size_;
  }

  size_t Size() const {
    return size_;
  }

  size_t RemainingSpace() const {
    return Size() - bytes_allocated_;
  }

  size_t GetBytesAllocated() const {
    return bytes_allocated_;
  }

  // Return true if ptr is contained in the arena.
  bool Contains(const void* ptr) const {
    return memory_ <= ptr && ptr < memory_ + bytes_allocated_;
  }

 protected:
  size_t bytes_allocated_;
  uint8_t* memory_;
  size_t size_;
  Arena* next_;
  friend class MallocArenaPool;
  friend class MemMapArenaPool;
  friend class ArenaAllocator;
  friend class ArenaStack;
  friend class ScopedArenaAllocator;
  template <bool kCount> friend class ArenaAllocatorStatsImpl;

  friend class ArenaAllocatorTest;

 private:
  DISALLOW_COPY_AND_ASSIGN(Arena);
};
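
// Illustrative invariants of a live Arena, following directly from the accessors above
// (sketch only, not asserted by this header; AllocArena() call site is hypothetical):
//
//   Arena* a = pool->AllocArena(16u * 1024u);
//   DCHECK_EQ(a->Size(), a->GetBytesAllocated() + a->RemainingSpace());
//   DCHECK(a->Contains(a->Begin()) || a->GetBytesAllocated() == 0u);  // Only the used prefix.
//   DCHECK(!a->Contains(a->End()));                                   // End() is one past the chunk.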

class ArenaPool {
 public:
  virtual ~ArenaPool() = default;

  virtual Arena* AllocArena(size_t size) = 0;
  virtual void FreeArenaChain(Arena* first) = 0;
  virtual size_t GetBytesAllocated() const = 0;
  virtual void ReclaimMemory() = 0;
  virtual void LockReclaimMemory() = 0;
  // Trim the maps in arenas by madvising, used by JIT to reduce memory usage.
  virtual void TrimMaps() = 0;

 protected:
  ArenaPool() = default;

 private:
  DISALLOW_COPY_AND_ASSIGN(ArenaPool);
};
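
// The pool owns the Arena chunks: clients borrow them with AllocArena() and hand back a whole
// singly linked chain (via Arena::next_) with FreeArenaChain(). A minimal caller-side sketch,
// assuming MallocArenaPool is the concrete pool from malloc_arena_pool.h:
//
//   MallocArenaPool pool;
//   Arena* arena = pool.AllocArena(128u * 1024u);
//   ...                                // Bump-allocate out of [arena->Begin(), arena->End()).
//   pool.FreeArenaChain(arena);        // Returns the chain starting at |arena| to the pool.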

// Fast single-threaded allocator for zero-initialized memory chunks.
//
// Memory is allocated from ArenaPool in large chunks and then rationed through
// the ArenaAllocator. It's returned to the ArenaPool only when the ArenaAllocator
// is destroyed.
class ArenaAllocator
    : private DebugStackRefCounter, private ArenaAllocatorStats, private ArenaAllocatorMemoryTool {
 public:
  explicit ArenaAllocator(ArenaPool* pool);
  ~ArenaAllocator();

  using ArenaAllocatorMemoryTool::IsRunningOnMemoryTool;
  using ArenaAllocatorMemoryTool::MakeDefined;
  using ArenaAllocatorMemoryTool::MakeUndefined;
  using ArenaAllocatorMemoryTool::MakeInaccessible;

  // Get adapter for use in STL containers. See arena_containers.h .
  ArenaAllocatorAdapter<void> Adapter(ArenaAllocKind kind = kArenaAllocSTL);

  // Returns zeroed memory.
  void* Alloc(size_t bytes, ArenaAllocKind kind = kArenaAllocMisc) ALWAYS_INLINE {
    if (UNLIKELY(IsRunningOnMemoryTool())) {
      return AllocWithMemoryTool(bytes, kind);
    }
    bytes = RoundUp(bytes, kAlignment);
    ArenaAllocatorStats::RecordAlloc(bytes, kind);
    if (UNLIKELY(bytes > static_cast<size_t>(end_ - ptr_))) {
      return AllocFromNewArena(bytes);
    }
    uint8_t* ret = ptr_;
    DCHECK_ALIGNED(ret, kAlignment);
    ptr_ += bytes;
    return ret;
  }
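
  // Worked example of the bump-pointer fast path above (illustrative values only): a request
  // for 5 bytes is rounded up to kAlignment (8), so with 24 bytes left in the current arena
  // (end_ - ptr_ == 24) the allocation succeeds, ptr_ advances by 8 and 16 bytes remain; a
  // later request rounding up to more than 16 bytes falls back to AllocFromNewArena().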

  // Returns zeroed memory.
  void* AllocAlign16(size_t bytes, ArenaAllocKind kind = kArenaAllocMisc) ALWAYS_INLINE {
    // It is an error to request 16-byte aligned allocation of unaligned size.
    DCHECK_ALIGNED(bytes, 16);
    if (UNLIKELY(IsRunningOnMemoryTool())) {
      return AllocWithMemoryToolAlign16(bytes, kind);
    }
    uintptr_t padding =
        RoundUp(reinterpret_cast<uintptr_t>(ptr_), 16) - reinterpret_cast<uintptr_t>(ptr_);
    ArenaAllocatorStats::RecordAlloc(bytes, kind);
    if (UNLIKELY(padding + bytes > static_cast<size_t>(end_ - ptr_))) {
      static_assert(kArenaAlignment >= 16, "Expecting sufficient alignment for new Arena.");
      return AllocFromNewArena(bytes);
    }
    ptr_ += padding;
    uint8_t* ret = ptr_;
    DCHECK_ALIGNED(ret, 16);
    ptr_ += bytes;
    return ret;
  }
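
  // Worked example for AllocAlign16 (illustrative addresses only): with ptr_ at ...0x1008,
  // padding = RoundUp(0x1008, 16) - 0x1008 = 8, so the returned pointer is ...0x1010 and is
  // 16-byte aligned; a new arena (aligned to kArenaAlignment >= 16) is taken only when
  // padding + bytes does not fit in the current one.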
  // Realloc never frees the input pointer; it is the caller's job to do this if necessary.
  void* Realloc(void* ptr,
                size_t ptr_size,
                size_t new_size,
                ArenaAllocKind kind = kArenaAllocMisc) ALWAYS_INLINE {
    DCHECK_GE(new_size, ptr_size);
    DCHECK_EQ(ptr == nullptr, ptr_size == 0u);
    // We always allocate aligned.
    const size_t aligned_ptr_size = RoundUp(ptr_size, kAlignment);
    auto* end = reinterpret_cast<uint8_t*>(ptr) + aligned_ptr_size;
    // If we haven't allocated anything else, we can safely extend.
    if (end == ptr_) {
      // Red zone prevents end == ptr_ (unless input = allocator state = null).
      DCHECK(!IsRunningOnMemoryTool() || ptr_ == nullptr);
      const size_t aligned_new_size = RoundUp(new_size, kAlignment);
      const size_t size_delta = aligned_new_size - aligned_ptr_size;
      // Check the remaining space.
      const size_t remain = end_ - ptr_;
      if (remain >= size_delta) {
        ptr_ += size_delta;
        ArenaAllocatorStats::RecordAlloc(size_delta, kind);
        DCHECK_ALIGNED(ptr_, kAlignment);
        return ptr;
      }
    }
    auto* new_ptr = Alloc(new_size, kind);  // Note: Alloc will take care of aligning new_size.
    memcpy(new_ptr, ptr, ptr_size);
    // TODO: Call free on ptr if linear alloc supports free.
    return new_ptr;
  }
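
  // Realloc usage sketch (illustrative only): growing the most recent allocation extends it in
  // place; anything else gets a fresh, larger block and a memcpy, and the old block is simply
  // abandoned inside the arena (its memory is reclaimed only when the allocator is destroyed).
  //
  //   void* buf = allocator.Alloc(64u);
  //   buf = allocator.Realloc(buf, 64u, 128u);   // In place if nothing was allocated after buf.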

  template <typename T>
  T* Alloc(ArenaAllocKind kind = kArenaAllocMisc) {
    return AllocArray<T>(1, kind);
  }

  template <typename T>
  T* AllocArray(size_t length, ArenaAllocKind kind = kArenaAllocMisc) {
    return static_cast<T*>(Alloc(length * sizeof(T), kind));
  }

  size_t BytesAllocated() const;

  MemStats GetMemStats() const;

  // The BytesUsed method sums up bytes allocated from arenas in arena_head_ and nodes.
  // TODO: Change BytesAllocated to this behavior?
  size_t BytesUsed() const;

  ArenaPool* GetArenaPool() const {
    return pool_;
  }

  bool Contains(const void* ptr) const;

  // The alignment guaranteed for individual allocations.
  static constexpr size_t kAlignment = 8u;

  // The alignment required for the whole Arena rather than individual allocations.
  static constexpr size_t kArenaAlignment = 16u;

 private:
  void* AllocWithMemoryTool(size_t bytes, ArenaAllocKind kind);
  void* AllocWithMemoryToolAlign16(size_t bytes, ArenaAllocKind kind);
  uint8_t* AllocFromNewArena(size_t bytes);
  uint8_t* AllocFromNewArenaWithMemoryTool(size_t bytes);

  void UpdateBytesAllocated();

  ArenaPool* pool_;
  uint8_t* begin_;
  uint8_t* end_;
  uint8_t* ptr_;
  Arena* arena_head_;

  template <typename U>
  friend class ArenaAllocatorAdapter;

  friend class ArenaAllocatorTest;

  DISALLOW_COPY_AND_ASSIGN(ArenaAllocator);
};  // ArenaAllocator
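
// Typical lifetime of an ArenaAllocator (illustrative sketch only; MallocArenaPool is assumed
// to be the concrete pool from malloc_arena_pool.h):
//
//   MallocArenaPool pool;
//   ArenaAllocator allocator(&pool);
//   void* raw = allocator.Alloc(256u, kArenaAllocMisc);    // Zeroed, kAlignment (8) aligned.
//   int32_t* regs = allocator.AllocArray<int32_t>(16u);    // 16 zeroed int32_t values.
//   MemStats stats = allocator.GetMemStats();              // Snapshot; Dump() prints a breakdown.
//   // All arenas are handed back to |pool| when |allocator| is destroyed.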

class MemStats {
 public:
  MemStats(const char* name,
           const ArenaAllocatorStats* stats,
           const Arena* first_arena,
           ssize_t lost_bytes_adjustment = 0);
  void Dump(std::ostream& os) const;

 private:
  const char* const name_;
  const ArenaAllocatorStats* const stats_;
  const Arena* const first_arena_;
  const ssize_t lost_bytes_adjustment_;
};  // MemStats

}  // namespace art

#endif  // ART_LIBARTBASE_BASE_ARENA_ALLOCATOR_H_