• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /*
2  * Copyright (C) 2008 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 #include "space_bitmap-inl.h"
18 
19 #include <iomanip>
20 #include <sstream>
21 
22 #include "android-base/stringprintf.h"
23 
24 #include "art_field-inl.h"
25 #include "base/mem_map.h"
26 #include "dex/dex_file-inl.h"
27 #include "mirror/class-inl.h"
28 #include "mirror/object-inl.h"
29 #include "mirror/object_array.h"
30 
31 namespace art {
32 namespace gc {
33 namespace accounting {
34 
35 using android::base::StringPrintf;
36 
37 template<size_t kAlignment>
ComputeBitmapSize(uint64_t capacity)38 size_t SpaceBitmap<kAlignment>::ComputeBitmapSize(uint64_t capacity) {
39   // Number of space (heap) bytes covered by one bitmap word.
40   // (Word size in bytes = `sizeof(intptr_t)`, which is expected to be
41   // 4 on a 32-bit architecture and 8 on a 64-bit one.)
42   const uint64_t kBytesCoveredPerWord = kAlignment * kBitsPerIntPtrT;
43   // Calculate the number of words required to cover a space (heap)
44   // having a size of `capacity` bytes.
45   return (RoundUp(capacity, kBytesCoveredPerWord) / kBytesCoveredPerWord) * sizeof(intptr_t);
46 }
47 
48 template<size_t kAlignment>
ComputeHeapSize(uint64_t bitmap_bytes)49 size_t SpaceBitmap<kAlignment>::ComputeHeapSize(uint64_t bitmap_bytes) {
50   return bitmap_bytes * kBitsPerByte * kAlignment;
51 }
52 
53 template<size_t kAlignment>
CreateFromMemMap(const std::string & name,MemMap && mem_map,uint8_t * heap_begin,size_t heap_capacity)54 SpaceBitmap<kAlignment> SpaceBitmap<kAlignment>::CreateFromMemMap(
55     const std::string& name, MemMap&& mem_map, uint8_t* heap_begin, size_t heap_capacity) {
56   CHECK(mem_map.IsValid());
57   uintptr_t* bitmap_begin = reinterpret_cast<uintptr_t*>(mem_map.Begin());
58   const size_t bitmap_size = ComputeBitmapSize(heap_capacity);
59   return { name, std::move(mem_map), bitmap_begin, bitmap_size, heap_begin, heap_capacity };
60 }
61 
// Wraps an already-allocated mapping as a space bitmap. Takes ownership of
// `mem_map`; `bitmap_begin`/`bitmap_size` describe the word array inside it,
// and `heap_begin`/`heap_capacity` the heap range the bitmap covers.
template<size_t kAlignment>
SpaceBitmap<kAlignment>::SpaceBitmap(const std::string& name,
                                     MemMap&& mem_map,
                                     uintptr_t* bitmap_begin,
                                     size_t bitmap_size,
                                     const void* heap_begin,
                                     size_t heap_capacity)
    : mem_map_(std::move(mem_map)),
      bitmap_begin_(reinterpret_cast<Atomic<uintptr_t>*>(bitmap_begin)),
      bitmap_size_(bitmap_size),
      heap_begin_(reinterpret_cast<uintptr_t>(heap_begin)),
      heap_limit_(reinterpret_cast<uintptr_t>(heap_begin) + heap_capacity),
      name_(name) {
  // A bitmap constructed this way must have real backing storage and at least
  // one word (the default constructor is the only way to make an empty one).
  CHECK(bitmap_begin_ != nullptr);
  CHECK_NE(bitmap_size, 0U);
}
78 
79 template<size_t kAlignment>
~SpaceBitmap()80 SpaceBitmap<kAlignment>::~SpaceBitmap() {}
81 
82 template<size_t kAlignment>
Create(const std::string & name,uint8_t * heap_begin,size_t heap_capacity)83 SpaceBitmap<kAlignment> SpaceBitmap<kAlignment>::Create(
84     const std::string& name, uint8_t* heap_begin, size_t heap_capacity) {
85   // Round up since `heap_capacity` is not necessarily a multiple of `kAlignment * kBitsPerIntPtrT`
86   // (we represent one word as an `intptr_t`).
87   const size_t bitmap_size = ComputeBitmapSize(heap_capacity);
88   std::string error_msg;
89   MemMap mem_map = MemMap::MapAnonymous(name.c_str(),
90                                         bitmap_size,
91                                         PROT_READ | PROT_WRITE,
92                                         /*low_4gb=*/ false,
93                                         &error_msg);
94   if (UNLIKELY(!mem_map.IsValid())) {
95     LOG(ERROR) << "Failed to allocate bitmap " << name << ": " << error_msg;
96     return SpaceBitmap<kAlignment>();
97   }
98   return CreateFromMemMap(name, std::move(mem_map), heap_begin, heap_capacity);
99 }
100 
101 template<size_t kAlignment>
SetHeapLimit(uintptr_t new_end)102 void SpaceBitmap<kAlignment>::SetHeapLimit(uintptr_t new_end) {
103   DCHECK_ALIGNED(new_end, kBitsPerIntPtrT * kAlignment);
104   size_t new_size = OffsetToIndex(new_end - heap_begin_) * sizeof(intptr_t);
105   if (new_size < bitmap_size_) {
106     bitmap_size_ = new_size;
107   }
108   heap_limit_ = new_end;
109   // Not sure if doing this trim is necessary, since nothing past the end of the heap capacity
110   // should be marked.
111 }
112 
113 template<size_t kAlignment>
Dump() const114 std::string SpaceBitmap<kAlignment>::Dump() const {
115   return StringPrintf("%s: %p-%p", name_.c_str(), reinterpret_cast<void*>(HeapBegin()),
116                       reinterpret_cast<void*>(HeapLimit()));
117 }
118 
// Debugging aid: dumps the bitmap word that holds `obj`'s bit together with
// the two neighboring words, plus the offset/index/mask used to locate it.
// `obj` must lie within the covered heap range.
template <size_t kAlignment>
std::string SpaceBitmap<kAlignment>::DumpMemAround(mirror::Object* obj) const {
  uintptr_t addr = reinterpret_cast<uintptr_t>(obj);
  DCHECK_GE(addr, heap_begin_);
  DCHECK(HasAddress(obj)) << obj;
  const uintptr_t offset = addr - heap_begin_;
  const size_t index = OffsetToIndex(offset);
  const uintptr_t mask = OffsetToMask(offset);
  size_t num_entries = bitmap_size_ / sizeof(uintptr_t);
  DCHECK_LT(index, num_entries) << " bitmap_size_ = " << bitmap_size_;
  Atomic<uintptr_t>* atomic_entry = &bitmap_begin_[index];
  // Neighbors default to 0 when `index` sits at either end of the word array.
  uintptr_t prev = 0;
  uintptr_t next = 0;
  if (index > 0) {
    prev = (atomic_entry - 1)->load(std::memory_order_relaxed);
  }
  uintptr_t curr = atomic_entry->load(std::memory_order_relaxed);
  if (index < num_entries - 1) {
    next = (atomic_entry + 1)->load(std::memory_order_relaxed);
  }
  // Words are printed as zero-padded 16-digit hex values.
  std::ostringstream oss;
  oss << " offset: " << offset
      << " index: " << index
      << " mask: " << std::hex << std::setfill('0') << std::setw(16) << mask
      << " words {" << std::hex << std::setfill('0') << std::setw(16) << prev
      << ", " << std::hex << std::setfill('0') << std::setw(16) << curr
      << ", " << std::hex <<std::setfill('0') << std::setw(16) << next
      << "}";
  return oss.str();
}
149 
150 template<size_t kAlignment>
Clear()151 void SpaceBitmap<kAlignment>::Clear() {
152   if (bitmap_begin_ != nullptr) {
153     mem_map_.MadviseDontNeedAndZero();
154   }
155 }
156 
157 template<size_t kAlignment>
ClearRange(const mirror::Object * begin,const mirror::Object * end)158 void SpaceBitmap<kAlignment>::ClearRange(const mirror::Object* begin, const mirror::Object* end) {
159   uintptr_t begin_offset = reinterpret_cast<uintptr_t>(begin) - heap_begin_;
160   uintptr_t end_offset = reinterpret_cast<uintptr_t>(end) - heap_begin_;
161   // Align begin and end to bitmap word boundaries.
162   while (begin_offset < end_offset && OffsetBitIndex(begin_offset) != 0) {
163     Clear(reinterpret_cast<mirror::Object*>(heap_begin_ + begin_offset));
164     begin_offset += kAlignment;
165   }
166   while (begin_offset < end_offset && OffsetBitIndex(end_offset) != 0) {
167     end_offset -= kAlignment;
168     Clear(reinterpret_cast<mirror::Object*>(heap_begin_ + end_offset));
169   }
170   // Bitmap word boundaries.
171   const uintptr_t start_index = OffsetToIndex(begin_offset);
172   const uintptr_t end_index = OffsetToIndex(end_offset);
173   ZeroAndReleasePages(reinterpret_cast<uint8_t*>(&bitmap_begin_[start_index]),
174                       (end_index - start_index) * sizeof(*bitmap_begin_));
175 }
176 
177 template<size_t kAlignment>
CopyFrom(SpaceBitmap * source_bitmap)178 void SpaceBitmap<kAlignment>::CopyFrom(SpaceBitmap* source_bitmap) {
179   DCHECK_EQ(Size(), source_bitmap->Size());
180   const size_t count = source_bitmap->Size() / sizeof(intptr_t);
181   Atomic<uintptr_t>* const src = source_bitmap->Begin();
182   Atomic<uintptr_t>* const dest = Begin();
183   for (size_t i = 0; i < count; ++i) {
184     dest[i].store(src[i].load(std::memory_order_relaxed), std::memory_order_relaxed);
185   }
186 }
187 
// Walks the heap range [sweep_begin, sweep_end) and invokes `callback` on
// batches of garbage objects — objects whose bit is set in `live_bitmap` but
// clear in `mark_bitmap` (live & ~mark). Both bitmaps must cover the same
// heap region.
template<size_t kAlignment>
void SpaceBitmap<kAlignment>::SweepWalk(const SpaceBitmap<kAlignment>& live_bitmap,
                                        const SpaceBitmap<kAlignment>& mark_bitmap,
                                        uintptr_t sweep_begin, uintptr_t sweep_end,
                                        SpaceBitmap::SweepCallback* callback, void* arg) {
  CHECK(live_bitmap.bitmap_begin_ != nullptr);
  CHECK(mark_bitmap.bitmap_begin_ != nullptr);
  CHECK_EQ(live_bitmap.heap_begin_, mark_bitmap.heap_begin_);
  CHECK_EQ(live_bitmap.bitmap_size_, mark_bitmap.bitmap_size_);
  CHECK(callback != nullptr);
  CHECK_LE(sweep_begin, sweep_end);
  CHECK_GE(sweep_begin, live_bitmap.heap_begin_);

  // Empty range: the CHECK_LE above already rules out end < begin, so this
  // only fires when begin == end.
  if (sweep_end <= sweep_begin) {
    return;
  }

  // Default batch capacity: enough slots for one full bitmap word of bits.
  size_t buffer_size = sizeof(intptr_t) * kBitsPerIntPtrT;
  Atomic<uintptr_t>* live = live_bitmap.bitmap_begin_;
  Atomic<uintptr_t>* mark = mark_bitmap.bitmap_begin_;
  // Word indices of the first and last word touched by the sweep range
  // (`end` is inclusive — note the `- 1`).
  const size_t start = OffsetToIndex(sweep_begin - live_bitmap.heap_begin_);
  const size_t end = OffsetToIndex(sweep_end - live_bitmap.heap_begin_ - 1);
  CHECK_LT(end, live_bitmap.Size() / sizeof(intptr_t));

  if (Runtime::Current()->IsRunningOnMemoryTool()) {
    // For memory tool, make the buffer large enough to hold all allocations. This is done since
    // we get the size of objects (and hence read the class) inside of the freeing logic. This can
    // cause crashes for unloaded classes since the class may get zeroed out before it is read.
    // See b/131542326
    for (size_t i = start; i <= end; i++) {
      uintptr_t garbage =
          live[i].load(std::memory_order_relaxed) & ~mark[i].load(std::memory_order_relaxed);
      buffer_size += POPCOUNT(garbage);
    }
  }
  std::vector<mirror::Object*> pointer_buf(buffer_size);
  mirror::Object** cur_pointer = &pointer_buf[0];
  // Flush threshold: leave one full word of headroom so a whole word of set
  // bits can always be appended before checking again.
  mirror::Object** pointer_end = cur_pointer + (buffer_size - kBitsPerIntPtrT);

  for (size_t i = start; i <= end; i++) {
    uintptr_t garbage =
        live[i].load(std::memory_order_relaxed) & ~mark[i].load(std::memory_order_relaxed);
    if (UNLIKELY(garbage != 0)) {
      // Heap address corresponding to bit 0 of this word.
      uintptr_t ptr_base = IndexToOffset(i) + live_bitmap.heap_begin_;
      // Extract each set bit (lowest first) and convert it to an object address.
      do {
        const size_t shift = CTZ(garbage);
        garbage ^= (static_cast<uintptr_t>(1)) << shift;
        *cur_pointer++ = reinterpret_cast<mirror::Object*>(ptr_base + shift * kAlignment);
      } while (garbage != 0);
      // Make sure that there are always enough slots available for an
      // entire word of one bits.
      if (cur_pointer >= pointer_end) {
        (*callback)(cur_pointer - &pointer_buf[0], &pointer_buf[0], arg);
        cur_pointer  = &pointer_buf[0];
      }
    }
  }
  // Flush any remaining batch.
  if (cur_pointer > &pointer_buf[0]) {
    (*callback)(cur_pointer - &pointer_buf[0], &pointer_buf[0], arg);
  }
}
249 
250 template class SpaceBitmap<kObjectAlignment>;
251 template class SpaceBitmap<kPageSize>;
252 
253 }  // namespace accounting
254 }  // namespace gc
255 }  // namespace art
256