/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_BASE_SCOPED_ARENA_ALLOCATOR_H_
#define ART_RUNTIME_BASE_SCOPED_ARENA_ALLOCATOR_H_

#include "arena_allocator.h"
#include "debug_stack.h"
#include "globals.h"
#include "logging.h"
#include "macros.h"

namespace art {

class ArenaStack;
class ScopedArenaAllocator;

template <typename T>
class ScopedArenaAllocatorAdapter;

// Tag associated with each allocation to help prevent double free.
enum class ArenaFreeTag : uint8_t {
  // Allocation is used and has not yet been destroyed.
  kUsed,
  // Allocation has been destroyed.
  kFree,
};

// Holds the list of Arenas used by a stack of ScopedArenaAllocators.
// The memory is returned to the ArenaPool when the ArenaStack is destroyed.
class ArenaStack : private DebugStackRefCounter, private ArenaAllocatorMemoryTool {
 public:
  explicit ArenaStack(ArenaPool* arena_pool);
  ~ArenaStack();

  using ArenaAllocatorMemoryTool::IsRunningOnMemoryTool;
  using ArenaAllocatorMemoryTool::MakeDefined;
  using ArenaAllocatorMemoryTool::MakeUndefined;
  using ArenaAllocatorMemoryTool::MakeInaccessible;

  void Reset();

  size_t PeakBytesAllocated() {
    return PeakStats()->BytesAllocated();
  }

  MemStats GetPeakStats() const;

  // Return the arena tag associated with a pointer.
  static ArenaFreeTag& ArenaTagForAllocation(void* ptr) {
    DCHECK(kIsDebugBuild) << "Only debug builds have tags";
    return *(reinterpret_cast<ArenaFreeTag*>(ptr) - 1);
  }
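
  // Illustrative debug-build protocol a caller might follow when destroying an
  // allocation (a sketch, not code from this file):
  //   DCHECK(ArenaStack::ArenaTagForAllocation(ptr) == ArenaFreeTag::kUsed);
  //   ArenaStack::ArenaTagForAllocation(ptr) = ArenaFreeTag::kFree;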

  // The alignment guaranteed for individual allocations.
  static constexpr size_t kAlignment = 8u;

 private:
  struct Peak;
  struct Current;
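  // TaggedStats gives StatsAndPool two distinct ArenaAllocatorStats base
  // subobjects: TaggedStats<Peak> holds the peak usage and TaggedStats<Current>
  // the current usage, selected via the casts in PeakStats() and CurrentStats().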
  template <typename Tag> struct TaggedStats : ArenaAllocatorStats { };
  struct StatsAndPool : TaggedStats<Peak>, TaggedStats<Current> {
    explicit StatsAndPool(ArenaPool* arena_pool) : pool(arena_pool) { }
    ArenaPool* const pool;
  };

  ArenaAllocatorStats* PeakStats() {
    return static_cast<TaggedStats<Peak>*>(&stats_and_pool_);
  }

  ArenaAllocatorStats* CurrentStats() {
    return static_cast<TaggedStats<Current>*>(&stats_and_pool_);
  }

  // Private - access via ScopedArenaAllocator or ScopedArenaAllocatorAdapter.
  void* Alloc(size_t bytes, ArenaAllocKind kind) ALWAYS_INLINE {
    if (UNLIKELY(IsRunningOnMemoryTool())) {
      return AllocWithMemoryTool(bytes, kind);
    }
    // Add kAlignment for the free or used tag. Required to preserve alignment.
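    // In debug builds the block is laid out as
    //   [ kAlignment - 1 padding bytes | 1 tag byte | user data ... ],
    // so the returned pointer stays kAlignment-aligned and its tag lives in
    // the byte just before it (see ArenaTagForAllocation()).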
    size_t rounded_bytes = RoundUp(bytes + (kIsDebugBuild ? kAlignment : 0u), kAlignment);
    uint8_t* ptr = top_ptr_;
    if (UNLIKELY(static_cast<size_t>(top_end_ - ptr) < rounded_bytes)) {
      ptr = AllocateFromNextArena(rounded_bytes);
    }
    CurrentStats()->RecordAlloc(bytes, kind);
    top_ptr_ = ptr + rounded_bytes;
    if (kIsDebugBuild) {
      ptr += kAlignment;
      ArenaTagForAllocation(ptr) = ArenaFreeTag::kUsed;
    }
    return ptr;
  }

  uint8_t* AllocateFromNextArena(size_t rounded_bytes);
  void UpdatePeakStatsAndRestore(const ArenaAllocatorStats& restore_stats);
  void UpdateBytesAllocated();
  void* AllocWithMemoryTool(size_t bytes, ArenaAllocKind kind);

  StatsAndPool stats_and_pool_;
  Arena* bottom_arena_;
  Arena* top_arena_;
  uint8_t* top_ptr_;
  uint8_t* top_end_;

  friend class ScopedArenaAllocator;
  template <typename T>
  friend class ScopedArenaAllocatorAdapter;

  DISALLOW_COPY_AND_ASSIGN(ArenaStack);
};

// Fast single-threaded allocator. Allocated chunks are _not_ guaranteed to be zero-initialized.
//
// Unlike the ArenaAllocator, ScopedArenaAllocator is intended for relatively short-lived
// objects and allows nesting multiple allocators. Only the top allocator can be used, but
// once it is destroyed, its memory can be reused by the next ScopedArenaAllocator on the
// stack. This is facilitated by returning the memory to the ArenaStack. A usage sketch
// follows the class definition.
class ScopedArenaAllocator
    : private DebugStackReference, private DebugStackRefCounter, private ArenaAllocatorStats {
 public:
  // Create a ScopedArenaAllocator directly on the ArenaStack when the scope of
  // the allocator is not exactly a C++ block scope. For example, an optimization
  // pass can create the scoped allocator in Start() and destroy it in End().
  static ScopedArenaAllocator* Create(ArenaStack* arena_stack) {
    void* addr = arena_stack->Alloc(sizeof(ScopedArenaAllocator), kArenaAllocMisc);
    ScopedArenaAllocator* allocator = new(addr) ScopedArenaAllocator(arena_stack);
    allocator->mark_ptr_ = reinterpret_cast<uint8_t*>(addr);
    return allocator;
  }
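
  // Illustrative sketch of the Start()/End() pattern described above
  // (FooPass and its members are hypothetical):
  //   void FooPass::Start() { allocator_ = ScopedArenaAllocator::Create(&arena_stack_); }
  //   void FooPass::End() { delete allocator_; }  // Runs the destructor; deallocation
  //                                               // is a no-op (see operator delete below).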

  explicit ScopedArenaAllocator(ArenaStack* arena_stack);
  ~ScopedArenaAllocator();

  void Reset();

  void* Alloc(size_t bytes, ArenaAllocKind kind = kArenaAllocMisc) ALWAYS_INLINE {
    DebugStackReference::CheckTop();
    return arena_stack_->Alloc(bytes, kind);
  }

  template <typename T>
  T* Alloc(ArenaAllocKind kind = kArenaAllocMisc) {
    return AllocArray<T>(1, kind);
  }

  template <typename T>
  T* AllocArray(size_t length, ArenaAllocKind kind = kArenaAllocMisc) {
    return static_cast<T*>(Alloc(length * sizeof(T), kind));
  }

  // Get adapter for use in STL containers. See scoped_arena_containers.h.
  ScopedArenaAllocatorAdapter<void> Adapter(ArenaAllocKind kind = kArenaAllocSTL);
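  // For example (illustrative; ScopedArenaVector comes from scoped_arena_containers.h):
  //   ScopedArenaVector<int> worklist(allocator.Adapter(kArenaAllocSTL));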

  // Allow a delete-expression to destroy but not deallocate allocators created by Create().
  static void operator delete(void* ptr ATTRIBUTE_UNUSED) {}

 private:
  ArenaStack* const arena_stack_;
  Arena* mark_arena_;
  uint8_t* mark_ptr_;
  uint8_t* mark_end_;

  void DoReset();

  template <typename T>
  friend class ScopedArenaAllocatorAdapter;

  DISALLOW_COPY_AND_ASSIGN(ScopedArenaAllocator);
};
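
// Usage sketch (illustrative; `pool` is assumed to be an existing ArenaPool):
//
//   ArenaStack arena_stack(&pool);
//   {
//     ScopedArenaAllocator outer(&arena_stack);
//     int* counts = outer.AllocArray<int>(16);     // Not zero-initialized.
//     {
//       ScopedArenaAllocator inner(&arena_stack);  // Nested; `inner` is now the top allocator.
//       void* scratch = inner.Alloc(256);
//       // `inner` is destroyed here; its memory is returned to the ArenaStack
//       // and may be reused by the next allocator on the stack.
//     }
//     // `outer` is the top allocator again and may be used.
//   }
//   // `arena_stack` returns its arenas to `pool` on destruction.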

}  // namespace art

#endif  // ART_RUNTIME_BASE_SCOPED_ARENA_ALLOCATOR_H_