/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_LIBARTBASE_BASE_SCOPED_ARENA_ALLOCATOR_H_
#define ART_LIBARTBASE_BASE_SCOPED_ARENA_ALLOCATOR_H_

#include <android-base/logging.h>

#include "arena_allocator.h"
#include "debug_stack.h"
#include "globals.h"
#include "macros.h"

namespace art {

class ArenaStack;
class ScopedArenaAllocator;

template <typename T>
class ScopedArenaAllocatorAdapter;

// Tag associated with each allocation to help prevent double free.
enum class ArenaFreeTag : uint8_t {
  // Allocation is used and has not yet been destroyed.
  kUsed,
  // Allocation has been destroyed.
  kFree,
};

// Holds a list of Arenas for use by the ScopedArenaAllocator stack.
// The memory is returned to the ArenaPool when the ArenaStack is destroyed.
class ArenaStack : private DebugStackRefCounter, private ArenaAllocatorMemoryTool {
 public:
  explicit ArenaStack(ArenaPool* arena_pool);
  ~ArenaStack();

  using ArenaAllocatorMemoryTool::IsRunningOnMemoryTool;
  using ArenaAllocatorMemoryTool::MakeDefined;
  using ArenaAllocatorMemoryTool::MakeUndefined;
  using ArenaAllocatorMemoryTool::MakeInaccessible;

  void Reset();

  size_t PeakBytesAllocated() {
    DebugStackRefCounter::CheckNoRefs();
    return PeakStats()->BytesAllocated();
  }

  size_t ApproximatePeakBytes();

  MemStats GetPeakStats() const;

  // Return the arena tag associated with a pointer.
  static ArenaFreeTag& ArenaTagForAllocation(void* ptr) {
    DCHECK(kIsDebugBuild) << "Only debug builds have tags";
    return *(reinterpret_cast<ArenaFreeTag*>(ptr) - 1);
  }
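
  // Debug-build allocation layout (illustrative sketch, derived from Alloc()
  // below): each allocation is preceded by a kAlignment-byte header whose last
  // byte holds the ArenaFreeTag, and the caller receives a pointer just past
  // that header:
  //
  //   | kAlignment - 1 padding bytes | tag | user data | round-up padding |
  //   ^ block start                        ^ pointer returned to caller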

  // The alignment guaranteed for individual allocations.
  static constexpr size_t kAlignment = 8u;

 private:
  struct Peak;
  struct Current;
  template <typename Tag> struct TaggedStats : ArenaAllocatorStats { };
  struct StatsAndPool : TaggedStats<Peak>, TaggedStats<Current> {
    explicit StatsAndPool(ArenaPool* arena_pool) : pool(arena_pool) { }
    ArenaPool* const pool;
  };

  ArenaAllocatorStats* PeakStats() {
    return static_cast<TaggedStats<Peak>*>(&stats_and_pool_);
  }

  const ArenaAllocatorStats* PeakStats() const {
    return static_cast<const TaggedStats<Peak>*>(&stats_and_pool_);
  }

  ArenaAllocatorStats* CurrentStats() {
    return static_cast<TaggedStats<Current>*>(&stats_and_pool_);
  }

  // Private - access via ScopedArenaAllocator or ScopedArenaAllocatorAdapter.
  void* Alloc(size_t bytes, ArenaAllocKind kind) ALWAYS_INLINE {
    if (UNLIKELY(IsRunningOnMemoryTool())) {
      return AllocWithMemoryTool(bytes, kind);
    }
    // Add kAlignment for the free or used tag. Required to preserve alignment.
    size_t rounded_bytes = RoundUp(bytes + (kIsDebugBuild ? kAlignment : 0u), kAlignment);
    uint8_t* ptr = top_ptr_;
    if (UNLIKELY(static_cast<size_t>(top_end_ - ptr) < rounded_bytes)) {
      ptr = AllocateFromNextArena(rounded_bytes);
    }
    CurrentStats()->RecordAlloc(bytes, kind);
    top_ptr_ = ptr + rounded_bytes;
    if (kIsDebugBuild) {
      ptr += kAlignment;
      ArenaTagForAllocation(ptr) = ArenaFreeTag::kUsed;
    }
    return ptr;
  }
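
  // Worked example of the fast path above (debug build, kAlignment = 8):
  // Alloc(20) computes rounded_bytes = RoundUp(20 + 8, 8) = 32, so the block
  // spans 32 bytes; the caller receives block_start + 8, the tag byte sits at
  // block_start + 7, and top_ptr_ advances by 32, keeping the next allocation
  // 8-byte aligned.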

  uint8_t* AllocateFromNextArena(size_t rounded_bytes);
  void UpdatePeakStatsAndRestore(const ArenaAllocatorStats& restore_stats);
  void UpdateBytesAllocated();
  void* AllocWithMemoryTool(size_t bytes, ArenaAllocKind kind);

  StatsAndPool stats_and_pool_;
  Arena* bottom_arena_;
  Arena* top_arena_;
  uint8_t* top_ptr_;
  uint8_t* top_end_;

  friend class ScopedArenaAllocator;
  template <typename T>
  friend class ScopedArenaAllocatorAdapter;

  DISALLOW_COPY_AND_ASSIGN(ArenaStack);
};
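
// Example (sketch, not part of this header): snapshotting peak usage after
// running code that allocates through ScopedArenaAllocators on the stack:
//
//   ArenaPool pool;
//   ArenaStack arena_stack(&pool);
//   ... run passes that use ScopedArenaAllocator(&arena_stack) ...
//   MemStats peak = arena_stack.GetPeakStats();  // Peak allocation statistics.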

// Fast single-threaded allocator. Allocated chunks are _not_ guaranteed to be zero-initialized.
//
// Unlike the ArenaAllocator, ScopedArenaAllocator is intended for relatively short-lived
// objects and allows nesting multiple allocators. Only the top allocator can be used, but
// once it's destroyed, its memory can be reused by the next ScopedArenaAllocator on the
// stack. This is facilitated by returning the memory to the ArenaStack.
class ScopedArenaAllocator
    : private DebugStackReference, private DebugStackRefCounter, private ArenaAllocatorStats {
 public:
  ScopedArenaAllocator(ScopedArenaAllocator&& other) noexcept;
  explicit ScopedArenaAllocator(ArenaStack* arena_stack);
  ~ScopedArenaAllocator();

  ArenaStack* GetArenaStack() const {
    return arena_stack_;
  }

  void Reset();

  void* Alloc(size_t bytes, ArenaAllocKind kind = kArenaAllocMisc) ALWAYS_INLINE {
    DebugStackReference::CheckTop();
    return arena_stack_->Alloc(bytes, kind);
  }

  template <typename T>
  T* Alloc(ArenaAllocKind kind = kArenaAllocMisc) {
    return AllocArray<T>(1, kind);
  }

  template <typename T>
  T* AllocArray(size_t length, ArenaAllocKind kind = kArenaAllocMisc) {
    return static_cast<T*>(Alloc(length * sizeof(T), kind));
  }

  // Get an adapter for use in STL containers. See scoped_arena_containers.h.
  ScopedArenaAllocatorAdapter<void> Adapter(ArenaAllocKind kind = kArenaAllocSTL);
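
  // Usage sketch (illustrative; assumes the ScopedArenaVector alias from
  // scoped_arena_containers.h):
  //
  //   ScopedArenaAllocator allocator(&arena_stack);
  //   ScopedArenaVector<int> v(allocator.Adapter(kArenaAllocSTL));
  //   v.push_back(42);  // Backing memory comes from the arena stack.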

  size_t ApproximatePeakBytes();

  // Allow a delete-expression to destroy but not deallocate allocators created by Create().
  static void operator delete(void* ptr ATTRIBUTE_UNUSED) {}

 private:
  ArenaStack* arena_stack_;
  Arena* mark_arena_;
  uint8_t* mark_ptr_;
  uint8_t* mark_end_;

  void DoReset();

  template <typename T>
  friend class ScopedArenaAllocatorAdapter;

  DISALLOW_COPY_AND_ASSIGN(ScopedArenaAllocator);
};
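
// Usage sketch (illustrative, not part of this header): nesting allocators on
// one ArenaStack. Only the innermost (top) allocator may allocate; destroying
// it returns its memory to the stack for reuse by the next allocator.
//
//   ArenaPool pool;
//   ArenaStack arena_stack(&pool);
//   {
//     ScopedArenaAllocator outer(&arena_stack);
//     int* data = outer.AllocArray<int>(16);  // `outer` is the top allocator.
//     {
//       ScopedArenaAllocator inner(&arena_stack);  // `inner` is now the top.
//       void* tmp = inner.Alloc(64);
//       // Allocating via `outer` here would fail DebugStackReference::CheckTop()
//       // in debug builds.
//     }  // `inner` destroyed; its memory is available for reuse.
//     // `outer` is the top again; `data` remains valid.
//   }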

}  // namespace art

#endif  // ART_LIBARTBASE_BASE_SCOPED_ARENA_ALLOCATOR_H_