/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <algorithm>
#include <iomanip>
#include <numeric>

#include "arena_allocator.h"
#include "base/logging.h"
#include "base/mutex.h"
#include "thread-inl.h"
#include <memcheck/memcheck.h>

namespace art {

// Memmap is a bit slower than malloc according to my measurements.
static constexpr bool kUseMemMap = false;
static constexpr bool kUseMemSet = true && kUseMemMap;
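// Size of the red zone appended to each allocation by AllocValgrind(); the
// padding is marked no-access so overruns are flagged when running under Valgrind.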
static constexpr size_t kValgrindRedZoneBytes = 8;
constexpr size_t Arena::kDefaultSize;

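// Human-readable labels for each allocation kind, printed by Dump(). The order
// must match the ArenaAllocKind enum; the COMPILE_ASSERT in Dump() checks that
// the array length matches the number of kinds.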
template <bool kCount>
const char* const ArenaAllocatorStatsImpl<kCount>::kAllocNames[] = {
  "Misc       ",
  "BasicBlock ",
  "LIR        ",
  "LIR masks  ",
  "MIR        ",
  "DataFlow   ",
  "GrowList   ",
  "GrowBitMap ",
  "Dalvik2SSA ",
  "DebugInfo  ",
  "Successor  ",
  "RegAlloc   ",
  "Data       ",
  "Preds      ",
  "STL        ",
};

template <bool kCount>
ArenaAllocatorStatsImpl<kCount>::ArenaAllocatorStatsImpl()
    : num_allocations_(0u) {
  std::fill_n(alloc_stats_, arraysize(alloc_stats_), 0u);
}

template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::Copy(const ArenaAllocatorStatsImpl& other) {
  num_allocations_ = other.num_allocations_;
  std::copy(other.alloc_stats_, other.alloc_stats_ + arraysize(alloc_stats_), alloc_stats_);
}

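// Charge |bytes| to the per-kind counter and bump the total allocation count.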
template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::RecordAlloc(size_t bytes, ArenaAllocKind kind) {
  alloc_stats_[kind] += bytes;
  ++num_allocations_;
}

template <bool kCount>
size_t ArenaAllocatorStatsImpl<kCount>::NumAllocations() const {
  return num_allocations_;
}

template <bool kCount>
size_t ArenaAllocatorStatsImpl<kCount>::BytesAllocated() const {
  const size_t init = 0u;  // Initial value of the correct type.
  return std::accumulate(alloc_stats_, alloc_stats_ + arraysize(alloc_stats_), init);
}

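// Dump a summary of the arena chain starting at |first|: bytes used, bytes
// actually allocated from the system, bytes lost to arena slack, and a
// per-kind breakdown of allocations.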
template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::Dump(std::ostream& os, const Arena* first,
                                           ssize_t lost_bytes_adjustment) const {
  size_t malloc_bytes = 0u;
  size_t lost_bytes = 0u;
  size_t num_arenas = 0u;
  for (const Arena* arena = first; arena != nullptr; arena = arena->next_) {
    malloc_bytes += arena->Size();
    lost_bytes += arena->RemainingSpace();
    ++num_arenas;
  }
  // The lost_bytes_adjustment is used to make up for the fact that the current arena
  // may not have the bytes_allocated_ updated correctly.
  lost_bytes += lost_bytes_adjustment;
  const size_t bytes_allocated = BytesAllocated();
  os << " MEM: used: " << bytes_allocated << ", allocated: " << malloc_bytes
     << ", lost: " << lost_bytes << "\n";
  size_t num_allocations = NumAllocations();
  if (num_allocations != 0) {
    os << "Number of arenas allocated: " << num_arenas << ", Number of allocations: "
       << num_allocations << ", avg size: " << bytes_allocated / num_allocations << "\n";
  }
  os << "===== Allocation by kind\n";
  COMPILE_ASSERT(arraysize(kAllocNames) == kNumArenaAllocKinds, check_arraysize_kAllocNames);
  for (int i = 0; i < kNumArenaAllocKinds; i++) {
    os << kAllocNames[i] << std::setw(10) << alloc_stats_[i] << "\n";
  }
}

// Explicitly instantiate the used implementation.
template class ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations>;

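// Back the arena either with an anonymous MemMap or with calloc(), depending on
// kUseMemMap. Both paths hand back zero-initialized memory.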
Arena::Arena(size_t size)
    : bytes_allocated_(0),
      map_(nullptr),
      next_(nullptr) {
  if (kUseMemMap) {
    std::string error_msg;
    map_ = MemMap::MapAnonymous("dalvik-arena", nullptr, size, PROT_READ | PROT_WRITE, false,
                                &error_msg);
    CHECK(map_ != nullptr) << error_msg;
    memory_ = map_->Begin();
    size_ = map_->Size();
  } else {
    memory_ = reinterpret_cast<uint8_t*>(calloc(1, size));
    size_ = size;
  }
}

Arena::~Arena() {
  if (kUseMemMap) {
    delete map_;
  } else {
    free(reinterpret_cast<void*>(memory_));
  }
}

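// Zero out the used portion of the arena so that memory handed out by the pool
// is always zero-filled (AllocValgrind() relies on this). The madvise() path is
// taken only when the arena is MemMap-backed and kUseMemSet is disabled.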
void Arena::Reset() {
  if (bytes_allocated_ > 0) {
    if (kUseMemSet || !kUseMemMap) {
      memset(Begin(), 0, bytes_allocated_);
    } else {
      map_->MadviseDontNeedAndZero();
    }
    bytes_allocated_ = 0;
  }
}

ArenaPool::ArenaPool()
    : lock_("Arena pool lock"),
      free_arenas_(nullptr) {
}

ArenaPool::~ArenaPool() {
  while (free_arenas_ != nullptr) {
    auto* arena = free_arenas_;
    free_arenas_ = free_arenas_->next_;
    delete arena;
  }
}

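// Hand out a free arena from the pool if the most recently freed one is large
// enough; otherwise allocate a fresh one. Only the head of the free list is
// examined.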
Arena* ArenaPool::AllocArena(size_t size) {
  Thread* self = Thread::Current();
  Arena* ret = nullptr;
  {
    MutexLock lock(self, lock_);
    if (free_arenas_ != nullptr && LIKELY(free_arenas_->Size() >= size)) {
      ret = free_arenas_;
      free_arenas_ = free_arenas_->next_;
    }
  }
  if (ret == nullptr) {
    ret = new Arena(size);
  }
  ret->Reset();
  return ret;
}

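// Return a whole chain of arenas to the free list. Under Valgrind, the memory
// is first marked undefined so that stale reads in reused arenas are reported.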
void ArenaPool::FreeArenaChain(Arena* first) {
  if (UNLIKELY(RUNNING_ON_VALGRIND > 0)) {
    for (Arena* arena = first; arena != nullptr; arena = arena->next_) {
      VALGRIND_MAKE_MEM_UNDEFINED(arena->memory_, arena->bytes_allocated_);
    }
  }
  if (first != nullptr) {
    Arena* last = first;
    while (last->next_ != nullptr) {
      last = last->next_;
    }
    Thread* self = Thread::Current();
    MutexLock lock(self, lock_);
    last->next_ = free_arenas_;
    free_arenas_ = first;
  }
}

size_t ArenaAllocator::BytesAllocated() const {
  return ArenaAllocatorStats::BytesAllocated();
}

ArenaAllocator::ArenaAllocator(ArenaPool* pool)
  : pool_(pool),
    begin_(nullptr),
    end_(nullptr),
    ptr_(nullptr),
    arena_head_(nullptr),
    running_on_valgrind_(RUNNING_ON_VALGRIND > 0) {
}
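
// Illustrative lifecycle sketch (not part of this file): a client creates an
// ArenaAllocator on top of a shared ArenaPool, allocates through it, and lets
// the destructor return every arena to the pool. Alloc() is assumed to be the
// inline fast path declared in arena_allocator.h.
//
//   ArenaPool pool;
//   {
//     ArenaAllocator allocator(&pool);
//     void* data = allocator.Alloc(256, kArenaAllocMisc);
//     // ... use |data|; there is no per-allocation free, the memory lives as
//     // long as |allocator| does.
//   }  // ~ArenaAllocator() calls pool.FreeArenaChain(arena_head_).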

void ArenaAllocator::UpdateBytesAllocated() {
  if (arena_head_ != nullptr) {
    // Update how many bytes we have allocated into the arena so that the arena pool knows how
    // much memory to zero out.
    arena_head_->bytes_allocated_ = ptr_ - begin_;
  }
}

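// Slow-path allocation used when running under Valgrind: each request is padded
// with kValgrindRedZoneBytes, and the padding is marked no-access so that reads
// or writes past the requested size trigger a Valgrind error.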
void* ArenaAllocator::AllocValgrind(size_t bytes, ArenaAllocKind kind) {
  size_t rounded_bytes = RoundUp(bytes + kValgrindRedZoneBytes, 8);
  if (UNLIKELY(ptr_ + rounded_bytes > end_)) {
    // Obtain a new block.
    ObtainNewArenaForAllocation(rounded_bytes);
    if (UNLIKELY(ptr_ == nullptr)) {
      return nullptr;
    }
  }
  ArenaAllocatorStats::RecordAlloc(rounded_bytes, kind);
  uint8_t* ret = ptr_;
  ptr_ += rounded_bytes;
  // Check that the memory is already zeroed out.
  for (uint8_t* ptr = ret; ptr < ptr_; ++ptr) {
    CHECK_EQ(*ptr, 0U);
  }
  VALGRIND_MAKE_MEM_NOACCESS(ret + bytes, rounded_bytes - bytes);
  return ret;
}

ArenaAllocator::~ArenaAllocator() {
  // Reclaim all the arenas by giving them back to the arena pool.
  UpdateBytesAllocated();
  pool_->FreeArenaChain(arena_head_);
}

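// Fetch a new arena from the pool (at least kDefaultSize, larger if the pending
// allocation needs it) and make it the head of the arena chain.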
void ArenaAllocator::ObtainNewArenaForAllocation(size_t allocation_size) {
  UpdateBytesAllocated();
  Arena* new_arena = pool_->AllocArena(std::max(Arena::kDefaultSize, allocation_size));
  new_arena->next_ = arena_head_;
  arena_head_ = new_arena;
  // Update our internal data structures.
  ptr_ = begin_ = new_arena->Begin();
  end_ = new_arena->End();
}

MemStats::MemStats(const char* name, const ArenaAllocatorStats* stats, const Arena* first_arena,
                   ssize_t lost_bytes_adjustment)
    : name_(name),
      stats_(stats),
      first_arena_(first_arena),
      lost_bytes_adjustment_(lost_bytes_adjustment) {
}

void MemStats::Dump(std::ostream& os) const {
  os << name_ << " stats:\n";
  stats_->Dump(os, first_arena_, lost_bytes_adjustment_);
}

// Dump memory usage stats.
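// The adjustment compensates for bytes_allocated_ of the head arena being
// updated only lazily, in UpdateBytesAllocated().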
MemStats ArenaAllocator::GetMemStats() const {
  ssize_t lost_bytes_adjustment =
      (arena_head_ == nullptr) ? 0 : (end_ - ptr_) - arena_head_->RemainingSpace();
  return MemStats("ArenaAllocator", this, arena_head_, lost_bytes_adjustment);
}

}  // namespace art