/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_LIBARTBASE_BASE_ARENA_ALLOCATOR_H_
#define ART_LIBARTBASE_BASE_ARENA_ALLOCATOR_H_

#include <stddef.h>
#include <stdint.h>

#include <iosfwd>  // For the std::ostream declarations used below.

#include "bit_utils.h"
#include "debug_stack.h"
#include "dchecked_vector.h"
#include "macros.h"
#include "memory_tool.h"

namespace art {

class Arena;
class ArenaPool;
class ArenaAllocator;
class ArenaStack;
class ScopedArenaAllocator;
class MemStats;

template <typename T>
class ArenaAllocatorAdapter;

// When true, ArenaAllocator collects per-kind allocation statistics
// (see ArenaAllocatorStatsImpl below).
static constexpr bool kArenaAllocatorCountAllocations = false;

// Type of allocation for memory tuning.
enum ArenaAllocKind {
  kArenaAllocMisc,
  kArenaAllocSwitchTable,
  kArenaAllocSlowPaths,
  kArenaAllocGrowableBitMap,
  kArenaAllocSTL,
  kArenaAllocGraphBuilder,
  kArenaAllocGraph,
  kArenaAllocBasicBlock,
  kArenaAllocBlockList,
  kArenaAllocReversePostOrder,
  kArenaAllocLinearOrder,
  kArenaAllocReachabilityGraph,
  kArenaAllocConstantsMap,
  kArenaAllocPredecessors,
  kArenaAllocSuccessors,
  kArenaAllocDominated,
  kArenaAllocInstruction,
  kArenaAllocConstructorFenceInputs,
  kArenaAllocInvokeInputs,
  kArenaAllocPhiInputs,
  kArenaAllocTypeCheckInputs,
  kArenaAllocLoopInfo,
  kArenaAllocLoopInfoBackEdges,
  kArenaAllocTryCatchInfo,
  kArenaAllocUseListNode,
  kArenaAllocEnvironment,
  kArenaAllocEnvironmentVRegs,
  kArenaAllocEnvironmentLocations,
  kArenaAllocLocationSummary,
  kArenaAllocSsaBuilder,
  kArenaAllocMoveOperands,
  kArenaAllocCodeBuffer,
  kArenaAllocStackMaps,
  kArenaAllocOptimization,
  kArenaAllocGvn,
  kArenaAllocInductionVarAnalysis,
  kArenaAllocBoundsCheckElimination,
  kArenaAllocDCE,
  kArenaAllocLSA,
  kArenaAllocLSE,
  kArenaAllocCFRE,
  kArenaAllocLICM,
  kArenaAllocWBE,
  kArenaAllocLoopOptimization,
  kArenaAllocSsaLiveness,
  kArenaAllocSsaPhiElimination,
  kArenaAllocReferenceTypePropagation,
  kArenaAllocSelectGenerator,
  kArenaAllocSideEffectsAnalysis,
  kArenaAllocRegisterAllocator,
  kArenaAllocRegisterAllocatorValidate,
  kArenaAllocStackMapStream,
  kArenaAllocBitTableBuilder,
  kArenaAllocVectorNode,
  kArenaAllocCodeGenerator,
  kArenaAllocAssembler,
  kArenaAllocParallelMoveResolver,
  kArenaAllocGraphChecker,
  kArenaAllocVerifier,
  kArenaAllocCallingConvention,
  kArenaAllocCHA,
  kArenaAllocScheduler,
  kArenaAllocProfile,
  kArenaAllocSuperblockCloner,
  kArenaAllocTransaction,
  kNumArenaAllocKinds
};

template <bool kCount>
class ArenaAllocatorStatsImpl;

// No-op implementation, used when allocation counting is disabled.
template <>
class ArenaAllocatorStatsImpl<false> {
 public:
  ArenaAllocatorStatsImpl() = default;
  ArenaAllocatorStatsImpl(const ArenaAllocatorStatsImpl& other) = default;
  ArenaAllocatorStatsImpl& operator = (const ArenaAllocatorStatsImpl& other) = delete;

  void Copy(const ArenaAllocatorStatsImpl& other ATTRIBUTE_UNUSED) {}
  void RecordAlloc(size_t bytes ATTRIBUTE_UNUSED, ArenaAllocKind kind ATTRIBUTE_UNUSED) {}
  size_t NumAllocations() const { return 0u; }
  size_t BytesAllocated() const { return 0u; }
  void Dump(std::ostream& os ATTRIBUTE_UNUSED,
            const Arena* first ATTRIBUTE_UNUSED,
            ssize_t lost_bytes_adjustment ATTRIBUTE_UNUSED) const {}
};

// Counting implementation, used when allocation statistics are enabled.
template <bool kCount>
class ArenaAllocatorStatsImpl {
 public:
  ArenaAllocatorStatsImpl();
  ArenaAllocatorStatsImpl(const ArenaAllocatorStatsImpl& other) = default;
  ArenaAllocatorStatsImpl& operator = (const ArenaAllocatorStatsImpl& other) = delete;

  void Copy(const ArenaAllocatorStatsImpl& other);
  void RecordAlloc(size_t bytes, ArenaAllocKind kind);
  size_t NumAllocations() const;
  size_t BytesAllocated() const;
  void Dump(std::ostream& os, const Arena* first, ssize_t lost_bytes_adjustment) const;

 private:
  size_t num_allocations_;
  dchecked_vector<size_t> alloc_stats_;  // Bytes used by various allocation kinds.

  static const char* const kAllocNames[];
};

using ArenaAllocatorStats = ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations>;

class ArenaAllocatorMemoryTool {
 public:
  static constexpr bool IsRunningOnMemoryTool() { return kMemoryToolIsAvailable; }

  void MakeDefined(void* ptr, size_t size) {
    if (UNLIKELY(IsRunningOnMemoryTool())) {
      DoMakeDefined(ptr, size);
    }
  }
  void MakeUndefined(void* ptr, size_t size) {
    if (UNLIKELY(IsRunningOnMemoryTool())) {
      DoMakeUndefined(ptr, size);
    }
  }
  void MakeInaccessible(void* ptr, size_t size) {
    if (UNLIKELY(IsRunningOnMemoryTool())) {
      DoMakeInaccessible(ptr, size);
    }
  }

 private:
  void DoMakeDefined(void* ptr, size_t size);
  void DoMakeUndefined(void* ptr, size_t size);
  void DoMakeInaccessible(void* ptr, size_t size);
};

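// A minimal usage sketch (illustrative): these wrappers let callers annotate
// arena memory for a memory tool such as ASan or Valgrind, and compile away
// when no tool is available (e.g. via ArenaAllocator, which re-exports them):
//
//   allocator.MakeDefined(ptr, size);       // Mark bytes as usable.
//   memcpy(ptr, source, size);
//   allocator.MakeInaccessible(ptr, size);  // Re-poison them when done.
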
class Arena {
 public:
  Arena() : bytes_allocated_(0), memory_(nullptr), size_(0), next_(nullptr) {}

  virtual ~Arena() { }
  // Reset prepares the arena for reuse; it zeroes previously used memory with
  // memset for performance.
  void Reset();
  // Release is used in between uses; it uses madvise to reduce memory usage.
  virtual void Release() { }
  uint8_t* Begin() const {
    return memory_;
  }

  uint8_t* End() const { return memory_ + size_; }

  size_t Size() const {
    return size_;
  }

  size_t RemainingSpace() const {
    return Size() - bytes_allocated_;
  }

  size_t GetBytesAllocated() const {
    return bytes_allocated_;
  }

  // Return true if ptr is contained in the arena.
  bool Contains(const void* ptr) const { return memory_ <= ptr && ptr < memory_ + size_; }

  Arena* Next() const { return next_; }

 protected:
  size_t bytes_allocated_;
  uint8_t* memory_;
  size_t size_;
  Arena* next_;
  friend class MallocArenaPool;
  friend class MemMapArenaPool;
  friend class ArenaAllocator;
  friend class ArenaStack;
  friend class ScopedArenaAllocator;
  template <bool kCount> friend class ArenaAllocatorStatsImpl;

  friend class ArenaAllocatorTest;

 private:
  DISALLOW_COPY_AND_ASSIGN(Arena);
};

class ArenaPool {
 public:
  virtual ~ArenaPool() = default;

  virtual Arena* AllocArena(size_t size) = 0;
  virtual void FreeArenaChain(Arena* first) = 0;
  virtual size_t GetBytesAllocated() const = 0;
  virtual void ReclaimMemory() = 0;
  virtual void LockReclaimMemory() = 0;
  // Trim the maps in arenas by madvising, used by the JIT to reduce memory usage.
  virtual void TrimMaps() = 0;

 protected:
  ArenaPool() = default;

 private:
  DISALLOW_COPY_AND_ASSIGN(ArenaPool);
};

// Fast single-threaded allocator for zero-initialized memory chunks.
//
// Memory is allocated from ArenaPool in large chunks and then rationed through
// the ArenaAllocator. It's returned to the ArenaPool only when the ArenaAllocator
// is destroyed.
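//
// A minimal usage sketch (illustrative; MallocArenaPool, declared elsewhere,
// is one concrete ArenaPool):
//
//   MallocArenaPool pool;
//   ArenaAllocator allocator(&pool);
//   void* p = allocator.Alloc(64, kArenaAllocMisc);  // Zeroed, 8-byte aligned.
//   // All arenas go back to the pool when `allocator` is destroyed.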
class ArenaAllocator
    : private DebugStackRefCounter, private ArenaAllocatorStats, private ArenaAllocatorMemoryTool {
 public:
  explicit ArenaAllocator(ArenaPool* pool);
  ~ArenaAllocator();

  using ArenaAllocatorMemoryTool::IsRunningOnMemoryTool;
  using ArenaAllocatorMemoryTool::MakeDefined;
  using ArenaAllocatorMemoryTool::MakeUndefined;
  using ArenaAllocatorMemoryTool::MakeInaccessible;

  // Get adapter for use in STL containers. See arena_containers.h.
  ArenaAllocatorAdapter<void> Adapter(ArenaAllocKind kind = kArenaAllocSTL);

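  // Illustrative sketch (ArenaVector is an assumed alias from arena_containers.h):
  //
  //   ArenaVector<uint32_t> offsets(allocator.Adapter(kArenaAllocStackMaps));
  //   offsets.push_back(0u);  // The vector's storage comes from the arena.
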
  // Returns zeroed memory.
  void* Alloc(size_t bytes, ArenaAllocKind kind = kArenaAllocMisc) ALWAYS_INLINE {
    if (UNLIKELY(IsRunningOnMemoryTool())) {
      return AllocWithMemoryTool(bytes, kind);
    }
    bytes = RoundUp(bytes, kAlignment);
    ArenaAllocatorStats::RecordAlloc(bytes, kind);
    if (UNLIKELY(bytes > static_cast<size_t>(end_ - ptr_))) {
      // Slow path: the request does not fit into the current arena.
      return AllocFromNewArena(bytes);
    }
    // Fast path: bump-allocate from the current arena.
    uint8_t* ret = ptr_;
    DCHECK_ALIGNED(ret, kAlignment);
    ptr_ += bytes;
    return ret;
  }

  // Returns zeroed memory.
  void* AllocAlign16(size_t bytes, ArenaAllocKind kind = kArenaAllocMisc) ALWAYS_INLINE {
    // It is an error to request 16-byte aligned allocation of unaligned size.
    DCHECK_ALIGNED(bytes, 16);
    if (UNLIKELY(IsRunningOnMemoryTool())) {
      return AllocWithMemoryToolAlign16(bytes, kind);
    }
    uintptr_t padding =
        RoundUp(reinterpret_cast<uintptr_t>(ptr_), 16) - reinterpret_cast<uintptr_t>(ptr_);
    ArenaAllocatorStats::RecordAlloc(bytes, kind);
    if (UNLIKELY(padding + bytes > static_cast<size_t>(end_ - ptr_))) {
      static_assert(kArenaAlignment >= 16, "Expecting sufficient alignment for new Arena.");
      return AllocFromNewArena(bytes);
    }
    ptr_ += padding;
    uint8_t* ret = ptr_;
    DCHECK_ALIGNED(ret, 16);
    ptr_ += bytes;
    return ret;
  }

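  // Worked example for AllocAlign16 above (illustrative): if ptr_ ends in 0x8
  // and bytes == 32, then padding == RoundUp(0x...8, 16) - 0x...8 == 8, the
  // allocation consumes 8 + 32 bytes of the current arena, and the returned
  // pointer is 16-byte aligned.
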
  // Realloc never frees the input pointer; it is the caller's job to do so if necessary.
  void* Realloc(void* ptr,
                size_t ptr_size,
                size_t new_size,
                ArenaAllocKind kind = kArenaAllocMisc) ALWAYS_INLINE {
    DCHECK_GE(new_size, ptr_size);
    DCHECK_EQ(ptr == nullptr, ptr_size == 0u);
    // We always allocate aligned.
    const size_t aligned_ptr_size = RoundUp(ptr_size, kAlignment);
    auto* end = reinterpret_cast<uint8_t*>(ptr) + aligned_ptr_size;
    // If we haven't allocated anything else, we can safely extend.
    if (end == ptr_) {
      // Red zone prevents end == ptr_ (unless input = allocator state = null).
      DCHECK(!IsRunningOnMemoryTool() || ptr_ == nullptr);
      const size_t aligned_new_size = RoundUp(new_size, kAlignment);
      const size_t size_delta = aligned_new_size - aligned_ptr_size;
      // Check the remaining space in the current arena.
      const size_t remain = end_ - ptr_;
      if (remain >= size_delta) {
        ptr_ += size_delta;
        ArenaAllocatorStats::RecordAlloc(size_delta, kind);
        DCHECK_ALIGNED(ptr_, kAlignment);
        return ptr;
      }
    }
    auto* new_ptr = Alloc(new_size, kind);  // Note: Alloc will take care of aligning new_size.
    memcpy(new_ptr, ptr, ptr_size);
    // TODO: Call free on ptr if linear alloc supports free.
    return new_ptr;
  }

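  // Illustrative sketch: when the reallocated block is the most recent
  // allocation and the current arena has room, Realloc extends it in place:
  //
  //   void* data = allocator.Alloc(16);
  //   data = allocator.Realloc(data, 16, 32);  // Likely the same pointer.
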
  template <typename T>
  T* Alloc(ArenaAllocKind kind = kArenaAllocMisc) {
    return AllocArray<T>(1, kind);
  }

  template <typename T>
  T* AllocArray(size_t length, ArenaAllocKind kind = kArenaAllocMisc) {
    return static_cast<T*>(Alloc(length * sizeof(T), kind));
  }

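  // Note: the typed helpers above return zero-initialized storage and do not
  // run constructors, so they suit trivially constructible types. Illustrative
  // sketch:
  //
  //   uint32_t* vregs = allocator.AllocArray<uint32_t>(num_vregs, kArenaAllocMisc);
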
  size_t BytesAllocated() const;

  MemStats GetMemStats() const;

  // The BytesUsed method sums up bytes allocated from arenas in arena_head_ and nodes.
  // TODO: Change BytesAllocated to this behavior?
  size_t BytesUsed() const;

  ArenaPool* GetArenaPool() const {
    return pool_;
  }

  Arena* GetHeadArena() const {
    return arena_head_;
  }

  uint8_t* CurrentPtr() const {
    return ptr_;
  }

  size_t CurrentArenaUnusedBytes() const {
    DCHECK_LE(ptr_, end_);
    return end_ - ptr_;
  }
  // Resets the current arena in use, which will force us to get a new arena
  // on next allocation.
  void ResetCurrentArena();

  bool Contains(const void* ptr) const;

  // The alignment guaranteed for individual allocations.
  static constexpr size_t kAlignment = 8u;

  // The alignment required for the whole Arena rather than individual allocations.
  static constexpr size_t kArenaAlignment = 16u;

  // Extra bytes required by the memory tool.
  static constexpr size_t kMemoryToolRedZoneBytes = 8u;

 private:
  void* AllocWithMemoryTool(size_t bytes, ArenaAllocKind kind);
  void* AllocWithMemoryToolAlign16(size_t bytes, ArenaAllocKind kind);
  uint8_t* AllocFromNewArena(size_t bytes);
  uint8_t* AllocFromNewArenaWithMemoryTool(size_t bytes);

  void UpdateBytesAllocated();

  ArenaPool* pool_;
  uint8_t* begin_;
  uint8_t* end_;
  uint8_t* ptr_;
  Arena* arena_head_;

  template <typename U>
  friend class ArenaAllocatorAdapter;

  friend class ArenaAllocatorTest;

  DISALLOW_COPY_AND_ASSIGN(ArenaAllocator);
};  // ArenaAllocator

class MemStats {
 public:
  MemStats(const char* name,
           const ArenaAllocatorStats* stats,
           const Arena* first_arena,
           ssize_t lost_bytes_adjustment = 0);
  void Dump(std::ostream& os) const;

 private:
  const char* const name_;
  const ArenaAllocatorStats* const stats_;
  const Arena* const first_arena_;
  const ssize_t lost_bytes_adjustment_;
};  // MemStats
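
// Illustrative sketch: dumping collected statistics to any std::ostream (the
// numbers are meaningful only when kArenaAllocatorCountAllocations is true):
//
//   std::ostringstream oss;
//   allocator.GetMemStats().Dump(oss);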

}  // namespace art

#endif  // ART_LIBARTBASE_BASE_ARENA_ALLOCATOR_H_