/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_BASE_ARENA_ALLOCATOR_H_
#define ART_RUNTIME_BASE_ARENA_ALLOCATOR_H_

#include <stdint.h>
#include <stddef.h>

#include "base/bit_utils.h"
#include "base/memory_tool.h"
#include "debug_stack.h"
#include "macros.h"
#include "mutex.h"

namespace art {

class Arena;
class ArenaPool;
class ArenaAllocator;
class ArenaStack;
class ScopedArenaAllocator;
class MemMap;
class MemStats;

template <typename T>
class ArenaAllocatorAdapter;

static constexpr bool kArenaAllocatorCountAllocations = false;

// Type of allocation for memory tuning.
enum ArenaAllocKind {
  kArenaAllocMisc,
  kArenaAllocSwitchTable,
  kArenaAllocSlowPaths,
  kArenaAllocGrowableBitMap,
  kArenaAllocSTL,
  kArenaAllocGraphBuilder,
  kArenaAllocGraph,
  kArenaAllocBasicBlock,
  kArenaAllocBlockList,
  kArenaAllocReversePostOrder,
  kArenaAllocLinearOrder,
  kArenaAllocConstantsMap,
  kArenaAllocPredecessors,
  kArenaAllocSuccessors,
  kArenaAllocDominated,
  kArenaAllocInstruction,
  kArenaAllocInvokeInputs,
  kArenaAllocPhiInputs,
  kArenaAllocLoopInfo,
  kArenaAllocLoopInfoBackEdges,
  kArenaAllocTryCatchInfo,
  kArenaAllocUseListNode,
  kArenaAllocEnvironment,
  kArenaAllocEnvironmentVRegs,
  kArenaAllocEnvironmentLocations,
  kArenaAllocLocationSummary,
  kArenaAllocSsaBuilder,
  kArenaAllocMoveOperands,
  kArenaAllocCodeBuffer,
  kArenaAllocStackMaps,
  kArenaAllocOptimization,
  kArenaAllocGvn,
  kArenaAllocInductionVarAnalysis,
  kArenaAllocBoundsCheckElimination,
  kArenaAllocDCE,
  kArenaAllocLSE,
  kArenaAllocLICM,
  kArenaAllocSsaLiveness,
  kArenaAllocSsaPhiElimination,
  kArenaAllocReferenceTypePropagation,
  kArenaAllocSideEffectsAnalysis,
  kArenaAllocRegisterAllocator,
  kArenaAllocRegisterAllocatorValidate,
  kArenaAllocStackMapStream,
  kArenaAllocCodeGenerator,
  kArenaAllocAssembler,
  kArenaAllocParallelMoveResolver,
  kArenaAllocGraphChecker,
  kArenaAllocVerifier,
  kArenaAllocCallingConvention,
  kNumArenaAllocKinds
};

template <bool kCount>
class ArenaAllocatorStatsImpl;

template <>
class ArenaAllocatorStatsImpl<false> {
 public:
  ArenaAllocatorStatsImpl() = default;
  ArenaAllocatorStatsImpl(const ArenaAllocatorStatsImpl& other) = default;
  ArenaAllocatorStatsImpl& operator = (const ArenaAllocatorStatsImpl& other) = delete;

  void Copy(const ArenaAllocatorStatsImpl& other ATTRIBUTE_UNUSED) {}
  void RecordAlloc(size_t bytes ATTRIBUTE_UNUSED, ArenaAllocKind kind ATTRIBUTE_UNUSED) {}
  size_t NumAllocations() const { return 0u; }
  size_t BytesAllocated() const { return 0u; }
  void Dump(std::ostream& os ATTRIBUTE_UNUSED,
            const Arena* first ATTRIBUTE_UNUSED,
            ssize_t lost_bytes_adjustment ATTRIBUTE_UNUSED) const {}
};

template <bool kCount>
class ArenaAllocatorStatsImpl {
 public:
  ArenaAllocatorStatsImpl();
  ArenaAllocatorStatsImpl(const ArenaAllocatorStatsImpl& other) = default;
  ArenaAllocatorStatsImpl& operator = (const ArenaAllocatorStatsImpl& other) = delete;

  void Copy(const ArenaAllocatorStatsImpl& other);
  void RecordAlloc(size_t bytes, ArenaAllocKind kind);
  size_t NumAllocations() const;
  size_t BytesAllocated() const;
  void Dump(std::ostream& os, const Arena* first, ssize_t lost_bytes_adjustment) const;

 private:
  size_t num_allocations_;
  // TODO: Use std::array<size_t, kNumArenaAllocKinds> from C++11 when we upgrade the STL.
  size_t alloc_stats_[kNumArenaAllocKinds];  // Bytes used by various allocation kinds.

  static const char* const kAllocNames[];
};

typedef ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations> ArenaAllocatorStats;

template <bool kAvailable, bool kValgrind>
class ArenaAllocatorMemoryToolCheckImpl {
  // This is the generic template but since there is a partial specialization
  // for kValgrind == false, this can be instantiated only for kValgrind == true.
  static_assert(kValgrind, "This template can be instantiated only for Valgrind.");
  static_assert(kAvailable, "Valgrind implies memory tool availability.");

 public:
  ArenaAllocatorMemoryToolCheckImpl() : is_running_on_valgrind_(RUNNING_ON_MEMORY_TOOL) { }
  bool IsRunningOnMemoryTool() { return is_running_on_valgrind_; }

 private:
  const bool is_running_on_valgrind_;
};

template <bool kAvailable>
class ArenaAllocatorMemoryToolCheckImpl<kAvailable, false> {
 public:
  ArenaAllocatorMemoryToolCheckImpl() { }
  bool IsRunningOnMemoryTool() { return kAvailable; }
};

typedef ArenaAllocatorMemoryToolCheckImpl<kMemoryToolIsAvailable, kMemoryToolIsValgrind>
    ArenaAllocatorMemoryToolCheck;

class ArenaAllocatorMemoryTool : private ArenaAllocatorMemoryToolCheck {
 public:
  using ArenaAllocatorMemoryToolCheck::IsRunningOnMemoryTool;

  void MakeDefined(void* ptr, size_t size) {
    if (UNLIKELY(IsRunningOnMemoryTool())) {
      DoMakeDefined(ptr, size);
    }
  }
  void MakeUndefined(void* ptr, size_t size) {
    if (UNLIKELY(IsRunningOnMemoryTool())) {
      DoMakeUndefined(ptr, size);
    }
  }
  void MakeInaccessible(void* ptr, size_t size) {
    if (UNLIKELY(IsRunningOnMemoryTool())) {
      DoMakeInaccessible(ptr, size);
    }
  }

 private:
  void DoMakeDefined(void* ptr, size_t size);
  void DoMakeUndefined(void* ptr, size_t size);
  void DoMakeInaccessible(void* ptr, size_t size);
};
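
// Illustrative sketch (not part of the original header): an allocator that
// hands out a block followed by a red zone might pair these calls roughly as
// follows; `mem`, `bytes` and `kRedZoneBytes` are hypothetical names used only
// for this example.
//
//   uint8_t* mem = ...;                            // raw bytes carved out of an Arena
//   MakeDefined(mem, bytes);                       // user-visible region: readable and writable
//   MakeInaccessible(mem + bytes, kRedZoneBytes);  // trailing red zone: any access is reported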

class Arena {
 public:
  static constexpr size_t kDefaultSize = 128 * KB;
  Arena();
  virtual ~Arena() { }
  // Reset is for pre-use and uses memset for performance.
  void Reset();
  // Release is used in between uses and uses madvise to reduce memory usage.
  virtual void Release() { }
  uint8_t* Begin() {
    return memory_;
  }

  uint8_t* End() {
    return memory_ + size_;
  }

  size_t Size() const {
    return size_;
  }

  size_t RemainingSpace() const {
    return Size() - bytes_allocated_;
  }

  size_t GetBytesAllocated() const {
    return bytes_allocated_;
  }

  // Return true if ptr is contained in the arena.
  bool Contains(const void* ptr) const {
    return memory_ <= ptr && ptr < memory_ + bytes_allocated_;
  }

 protected:
  size_t bytes_allocated_;
  uint8_t* memory_;
  size_t size_;
  Arena* next_;
  friend class ArenaPool;
  friend class ArenaAllocator;
  friend class ArenaStack;
  friend class ScopedArenaAllocator;
  template <bool kCount> friend class ArenaAllocatorStatsImpl;

  friend class ArenaAllocatorTest;

 private:
  DISALLOW_COPY_AND_ASSIGN(Arena);
};
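
// Illustrative note (an addition, not from the original source): the usable
// range of an arena is [Begin(), End()), and the accessors are tied together
// by the following invariants; RemainingSpace() is what may still be carved out.
//
//   arena->End() - arena->Begin() == arena->Size()
//   arena->GetBytesAllocated() + arena->RemainingSpace() == arena->Size()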

class MallocArena FINAL : public Arena {
 public:
  explicit MallocArena(size_t size = Arena::kDefaultSize);
  virtual ~MallocArena();
};

class MemMapArena FINAL : public Arena {
 public:
  MemMapArena(size_t size, bool low_4gb, const char* name);
  virtual ~MemMapArena();
  void Release() OVERRIDE;

 private:
  std::unique_ptr<MemMap> map_;
};

class ArenaPool {
 public:
  ArenaPool(bool use_malloc = true,
            bool low_4gb = false,
            const char* name = "LinearAlloc");
  ~ArenaPool();
  Arena* AllocArena(size_t size) REQUIRES(!lock_);
  void FreeArenaChain(Arena* first) REQUIRES(!lock_);
  size_t GetBytesAllocated() const REQUIRES(!lock_);
  void ReclaimMemory() NO_THREAD_SAFETY_ANALYSIS;
  void LockReclaimMemory() REQUIRES(!lock_);
  // Trim the maps in arenas by madvising, used by JIT to reduce memory usage. This only works
  // when use_malloc is false.
  void TrimMaps() REQUIRES(!lock_);

 private:
  const bool use_malloc_;
  mutable Mutex lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
  Arena* free_arenas_ GUARDED_BY(lock_);
  const bool low_4gb_;
  const char* name_;
  DISALLOW_COPY_AND_ASSIGN(ArenaPool);
};
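
// Illustrative usage sketch (an assumption, not taken from the original
// source): a client borrows arenas from the pool and hands the whole chain
// back when it is done, letting the pool recycle the memory.
//
//   ArenaPool pool(/* use_malloc= */ true);
//   Arena* arena = pool.AllocArena(Arena::kDefaultSize);
//   // ... carve allocations out of [arena->Begin(), arena->End()) ...
//   pool.FreeArenaChain(arena);  // returns the arena (and anything chained to it)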

// Fast single-threaded allocator for zero-initialized memory chunks.
//
// Memory is allocated from ArenaPool in large chunks and then rationed through
// the ArenaAllocator. It's returned to the ArenaPool only when the ArenaAllocator
// is destroyed.
class ArenaAllocator
    : private DebugStackRefCounter, private ArenaAllocatorStats, private ArenaAllocatorMemoryTool {
 public:
  explicit ArenaAllocator(ArenaPool* pool);
  ~ArenaAllocator();

  using ArenaAllocatorMemoryTool::IsRunningOnMemoryTool;
  using ArenaAllocatorMemoryTool::MakeDefined;
  using ArenaAllocatorMemoryTool::MakeUndefined;
  using ArenaAllocatorMemoryTool::MakeInaccessible;

  // Get adapter for use in STL containers. See arena_containers.h.
  ArenaAllocatorAdapter<void> Adapter(ArenaAllocKind kind = kArenaAllocSTL);

  // Returns zeroed memory.
  void* Alloc(size_t bytes, ArenaAllocKind kind = kArenaAllocMisc) ALWAYS_INLINE {
    if (UNLIKELY(IsRunningOnMemoryTool())) {
      return AllocWithMemoryTool(bytes, kind);
    }
    bytes = RoundUp(bytes, kAlignment);
    ArenaAllocatorStats::RecordAlloc(bytes, kind);
    if (UNLIKELY(bytes > static_cast<size_t>(end_ - ptr_))) {
      return AllocFromNewArena(bytes);
    }
    uint8_t* ret = ptr_;
    ptr_ += bytes;
    return ret;
  }

  // Realloc never frees the input pointer; it is the caller's job to do this if necessary.
  void* Realloc(void* ptr, size_t ptr_size, size_t new_size,
                ArenaAllocKind kind = kArenaAllocMisc) ALWAYS_INLINE {
    DCHECK_GE(new_size, ptr_size);
    DCHECK_EQ(ptr == nullptr, ptr_size == 0u);
    auto* end = reinterpret_cast<uint8_t*>(ptr) + ptr_size;
    // If we haven't allocated anything else, we can safely extend.
    if (end == ptr_) {
      DCHECK(!IsRunningOnMemoryTool());  // Red zone prevents end == ptr_.
      const size_t size_delta = new_size - ptr_size;
      // Check remaining space.
      const size_t remain = end_ - ptr_;
      if (remain >= size_delta) {
        ptr_ += size_delta;
        ArenaAllocatorStats::RecordAlloc(size_delta, kind);
        return ptr;
      }
    }
    auto* new_ptr = Alloc(new_size, kind);
    memcpy(new_ptr, ptr, ptr_size);
    // TODO: Call free on ptr if linear alloc supports free.
    return new_ptr;
  }

  template <typename T>
  T* Alloc(ArenaAllocKind kind = kArenaAllocMisc) {
    return AllocArray<T>(1, kind);
  }

  template <typename T>
  T* AllocArray(size_t length, ArenaAllocKind kind = kArenaAllocMisc) {
    return static_cast<T*>(Alloc(length * sizeof(T), kind));
  }

  size_t BytesAllocated() const;

  MemStats GetMemStats() const;

  // The BytesUsed method sums up bytes allocated from arenas in arena_head_ and nodes.
  // TODO: Change BytesAllocated to this behavior?
  size_t BytesUsed() const;

  ArenaPool* GetArenaPool() const {
    return pool_;
  }

  bool Contains(const void* ptr) const;

 private:
  void* AllocWithMemoryTool(size_t bytes, ArenaAllocKind kind);
  uint8_t* AllocFromNewArena(size_t bytes);

  static constexpr size_t kAlignment = 8;

  void UpdateBytesAllocated();

  ArenaPool* pool_;
  uint8_t* begin_;
  uint8_t* end_;
  uint8_t* ptr_;
  Arena* arena_head_;

  template <typename U>
  friend class ArenaAllocatorAdapter;

  friend class ArenaAllocatorTest;

  DISALLOW_COPY_AND_ASSIGN(ArenaAllocator);
};  // ArenaAllocator
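
// Illustrative usage sketch (not part of the original header): a typical
// client creates one ArenaAllocator per unit of work, allocates zero-initialized
// memory from it, and releases everything at once when the allocator is
// destroyed. The sizes and kinds below are made up for the example.
//
//   ArenaPool pool;
//   ArenaAllocator allocator(&pool);
//   int32_t* vregs = allocator.AllocArray<int32_t>(16, kArenaAllocEnvironmentVRegs);
//   void* scratch = allocator.Alloc(64);  // kArenaAllocMisc by default
//   // STL containers can allocate through allocator.Adapter(); see arena_containers.h.
//   // All memory goes back to `pool` when `allocator` goes out of scope.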

class MemStats {
 public:
  MemStats(const char* name, const ArenaAllocatorStats* stats, const Arena* first_arena,
           ssize_t lost_bytes_adjustment = 0);
  void Dump(std::ostream& os) const;

 private:
  const char* const name_;
  const ArenaAllocatorStats* const stats_;
  const Arena* const first_arena_;
  const ssize_t lost_bytes_adjustment_;
};  // MemStats
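
// Illustrative sketch (an assumption): when kArenaAllocatorCountAllocations is
// true, per-kind statistics can be captured from an ArenaAllocator and dumped
// to any std::ostream, e.g. for memory tuning. `allocator` refers to the
// example above.
//
//   MemStats stats = allocator.GetMemStats();
//   std::ostringstream oss;
//   stats.Dump(oss);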

}  // namespace art

#endif  // ART_RUNTIME_BASE_ARENA_ALLOCATOR_H_