/*
 * Copyright (C) 2018 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "mem_map_arena_pool.h"

#include <sys/mman.h>

#include <algorithm>
#include <cstddef>
#include <iomanip>
#include <numeric>

#include <android-base/logging.h>

#include "base/arena_allocator-inl.h"
#include "base/mem_map.h"
#include "base/systrace.h"

namespace art {

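// An arena whose backing storage is an anonymous MemMap, so its pages can be
// returned to the kernel without destroying the arena itself.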
class MemMapArena final : public Arena {
 public:
  MemMapArena(size_t size, bool low_4gb, const char* name);
  virtual ~MemMapArena();
  void Release() override;

 private:
  static MemMap Allocate(size_t size, bool low_4gb, const char* name);

  MemMap map_;
};

MemMapArena::MemMapArena(size_t size, bool low_4gb, const char* name)
    : map_(Allocate(size, low_4gb, name)) {
  memory_ = map_.Begin();
  static_assert(ArenaAllocator::kArenaAlignment <= kPageSize,
                "Arena should not need stronger alignment than kPageSize.");
  DCHECK_ALIGNED(memory_, ArenaAllocator::kArenaAlignment);
  size_ = map_.Size();
}

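// Maps an anonymous, read-write region of at least `size` bytes for the
// arena; aborts (CHECK failure) if the mapping cannot be created.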
MemMap MemMapArena::Allocate(size_t size, bool low_4gb, const char* name) {
  // Round up to a full page as that's the smallest unit of allocation for mmap()
  // and we want to be able to use all memory that we actually allocate.
  size = RoundUp(size, kPageSize);
  std::string error_msg;
  MemMap map = MemMap::MapAnonymous(name,
                                    size,
                                    PROT_READ | PROT_WRITE,
                                    low_4gb,
                                    &error_msg);
  CHECK(map.IsValid()) << error_msg;
  return map;
}

MemMapArena::~MemMapArena() {
  // Empty: the destructor of the map_ member releases the mapping.
}

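// Returns the arena's pages to the kernel and zeroes them, keeping the
// mapping itself intact so the arena can be reused.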
void MemMapArena::Release() {
  if (bytes_allocated_ > 0) {
    map_.MadviseDontNeedAndZero();
    bytes_allocated_ = 0;
  }
}

MemMapArenaPool::MemMapArenaPool(bool low_4gb, const char* name)
    : low_4gb_(low_4gb),
      name_(name),
      free_arenas_(nullptr) {
  MemMap::Init();
}

MemMapArenaPool::~MemMapArenaPool() {
  ReclaimMemory();
}

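// Deletes every arena on the free list. Callers must guarantee exclusive
// access; see LockReclaimMemory() for the locked variant.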
void MemMapArenaPool::ReclaimMemory() {
  while (free_arenas_ != nullptr) {
    Arena* arena = free_arenas_;
    free_arenas_ = free_arenas_->next_;
    delete arena;
  }
}

void MemMapArenaPool::LockReclaimMemory() {
  std::lock_guard<std::mutex> lock(lock_);
  ReclaimMemory();
}

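// Hands out the arena at the head of the free list if it is large enough;
// otherwise allocates a fresh MemMapArena. The arena is reset before reuse.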
Arena* MemMapArenaPool::AllocArena(size_t size) {
  Arena* ret = nullptr;
  {
    std::lock_guard<std::mutex> lock(lock_);
    if (free_arenas_ != nullptr && LIKELY(free_arenas_->Size() >= size)) {
      ret = free_arenas_;
      free_arenas_ = free_arenas_->next_;
    }
  }
  if (ret == nullptr) {
    ret = new MemMapArena(size, low_4gb_, name_);
  }
  ret->Reset();
  return ret;
}

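// Releases the backing pages of all free arenas back to the kernel while
// keeping the arenas (and their mappings) available for reuse.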
void MemMapArenaPool::TrimMaps() {
  ScopedTrace trace(__PRETTY_FUNCTION__);
  std::lock_guard<std::mutex> lock(lock_);
  for (Arena* arena = free_arenas_; arena != nullptr; arena = arena->next_) {
    arena->Release();
  }
}

size_t MemMapArenaPool::GetBytesAllocated() const {
  size_t total = 0;
  std::lock_guard<std::mutex> lock(lock_);
  for (Arena* arena = free_arenas_; arena != nullptr; arena = arena->next_) {
    total += arena->GetBytesAllocated();
  }
  return total;
}

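// Returns a chain of arenas to the free list in a single locked operation,
// or deletes them outright when precise allocation tracking is enabled.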
void MemMapArenaPool::FreeArenaChain(Arena* first) {
  if (kRunningOnMemoryTool) {
    for (Arena* arena = first; arena != nullptr; arena = arena->next_) {
      MEMORY_TOOL_MAKE_UNDEFINED(arena->memory_, arena->bytes_allocated_);
    }
  }

  if (arena_allocator::kArenaAllocatorPreciseTracking) {
    // Do not reuse arenas when tracking.
    while (first != nullptr) {
      Arena* next = first->next_;
      delete first;
      first = next;
    }
    return;
  }

  if (first != nullptr) {
    Arena* last = first;
    while (last->next_ != nullptr) {
      last = last->next_;
    }
    std::lock_guard<std::mutex> lock(lock_);
    last->next_ = free_arenas_;
    free_arenas_ = first;
  }
}

}  // namespace art