/*
 * Copyright (C) 2008 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_GC_ACCOUNTING_SPACE_BITMAP_H_
#define ART_RUNTIME_GC_ACCOUNTING_SPACE_BITMAP_H_

#include <limits.h>
#include <stdint.h>
#include <memory>
#include <set>
#include <vector>

#include "base/locks.h"
#include "base/mem_map.h"
#include "runtime_globals.h"

namespace art HIDDEN {

namespace mirror {
class Class;
class Object;
}  // namespace mirror

namespace gc {
namespace accounting {

// A bitmap with one bit per kAlignment bytes of the covered heap range,
// stored in word-sized (uintptr_t) chunks for efficient scanning. Used by the
// GC to record per-object state (e.g. live / marked bits).
template<size_t kAlignment>
class SpaceBitmap {
 public:
  // Callback invoked per object while scanning; `finger` is the current scan position.
  using ScanCallback = void(mirror::Object* obj, void* finger, void* arg);
  // Callback invoked by SweepWalk with a batch of garbage object pointers.
  using SweepCallback = void(size_t ptr_count, mirror::Object** ptrs, void* arg);

  // Initialize a space bitmap so that it points to a bitmap large enough to cover a heap at
  // heap_begin of heap_capacity bytes, where objects are guaranteed to be kAlignment-aligned.
  EXPORT static SpaceBitmap Create(const std::string& name,
                                   uint8_t* heap_begin,
                                   size_t heap_capacity);

  // Initialize a space bitmap using the provided mem_map as the live bits. Takes ownership of the
  // mem map. The address range covered starts at heap_begin and is of size equal to heap_capacity.
  // Objects are kAlignment-aligned.
  static SpaceBitmap CreateFromMemMap(const std::string& name,
                                      MemMap&& mem_map,
                                      uint8_t* heap_begin,
                                      size_t heap_capacity);

  EXPORT ~SpaceBitmap();

  // Return the bitmap word index corresponding to memory offset (relative to
  // `HeapBegin()`) `offset`.
  // See also SpaceBitmap::OffsetBitIndex.
  //
  // <offset> is the difference from .base to a pointer address.
  // <index> is the index of .bits that contains the bit representing
  //         <offset>.
  static constexpr size_t OffsetToIndex(size_t offset) {
    return offset / kAlignment / kBitsPerIntPtrT;
  }

  // Return the memory offset (relative to `HeapBegin()`) corresponding to
  // bitmap word index `index`. Inverse of OffsetToIndex (up to alignment).
  template<typename T>
  static constexpr T IndexToOffset(T index) {
    return static_cast<T>(index * kAlignment * kBitsPerIntPtrT);
  }

  // Return the bit within the bitmap word index corresponding to
  // memory offset (relative to `HeapBegin()`) `offset`.
  // See also SpaceBitmap::OffsetToIndex.
  ALWAYS_INLINE static constexpr uintptr_t OffsetBitIndex(uintptr_t offset) {
    return (offset / kAlignment) % kBitsPerIntPtrT;
  }

  // Return the word-wide bit mask corresponding to `OffsetBitIndex(offset)`.
  // Bits are packed in the obvious way.
  static constexpr uintptr_t OffsetToMask(uintptr_t offset) {
    return static_cast<size_t>(1) << OffsetBitIndex(offset);
  }

  // Set the bit corresponding to `obj` in the bitmap and return the previous value of that bit.
  bool Set(const mirror::Object* obj) ALWAYS_INLINE {
    return Modify<true>(obj);
  }

  // Clear the bit corresponding to `obj` in the bitmap and return the previous value of that bit.
  bool Clear(const mirror::Object* obj) ALWAYS_INLINE {
    return Modify<false>(obj);
  }

  // Atomically set the bit for `obj`.
  // Returns true if the object was previously marked.
  bool AtomicTestAndSet(const mirror::Object* obj);

  // Fill the bitmap with zeroes.  Returns the bitmap's memory to the system as a side-effect.
  // If `release_eagerly` is true, this method will also try to give back the
  // memory to the OS eagerly.
  void Clear(bool release_eagerly = true);

  // Clear a range covered by the bitmap using madvise if possible.
  void ClearRange(const mirror::Object* begin, const mirror::Object* end);

  // Test whether `obj` is part of the bitmap (i.e. return whether the bit
  // corresponding to `obj` has been set in the bitmap).
  //
  // Precondition: `obj` is within the range of pointers that this bitmap could
  // potentially cover (i.e. `this->HasAddress(obj)` is true)
  bool Test(const mirror::Object* obj) const;

  // Return true iff <obj> is within the range of pointers that this bitmap could potentially cover,
  // even if a bit has not been set for it.
  bool HasAddress(const void* obj) const {
    // If obj < heap_begin_ then offset underflows to some very large value past the end of the
    // bitmap.
    const uintptr_t offset = reinterpret_cast<uintptr_t>(obj) - heap_begin_;
    const size_t index = OffsetToIndex(offset);
    return index < bitmap_size_ / sizeof(intptr_t);
  }

  // Visit every kAlignment-aligned address in [visit_begin, visit_end) as an
  // object, regardless of whether its bit is set in the bitmap.
  template <typename Visitor>
  void VisitRange(uintptr_t visit_begin, uintptr_t visit_end, const Visitor& visitor) const {
    for (; visit_begin < visit_end; visit_begin += kAlignment) {
      visitor(reinterpret_cast<mirror::Object*>(visit_begin));
    }
  }

  // Find first object while scanning bitmap backwards from visit_begin -> visit_end.
  // Covers [visit_end, visit_begin] range.
  mirror::Object* FindPrecedingObject(uintptr_t visit_begin, uintptr_t visit_end = 0) const;

  // Visit the live objects in the range [visit_begin, visit_end). If kVisitOnce
  // is true, then only the first live object will be visited.
  // TODO: Use lock annotations when clang is fixed.
  // REQUIRES(Locks::heap_bitmap_lock_) REQUIRES_SHARED(Locks::mutator_lock_);
  template <bool kVisitOnce = false, typename Visitor>
  void VisitMarkedRange(uintptr_t visit_begin, uintptr_t visit_end, Visitor&& visitor) const
      NO_THREAD_SAFETY_ANALYSIS;

  // Visit all of the set bits in HeapBegin(), HeapLimit().
  template <typename Visitor>
  void VisitAllMarked(Visitor&& visitor) const {
    VisitMarkedRange(HeapBegin(), HeapLimit(), visitor);
  }

  // Visits set bits in address order.  The callback is not permitted to change the bitmap bits or
  // max during the traversal.
  template <typename Visitor>
  void Walk(Visitor&& visitor)
      REQUIRES_SHARED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Walk through the bitmaps in increasing address order, and find the object pointers that
  // correspond to garbage objects.  Call <callback> zero or more times with lists of these object
  // pointers. The callback is not permitted to increase the max of either bitmap.
  static void SweepWalk(const SpaceBitmap& live, const SpaceBitmap& mark, uintptr_t base,
                        uintptr_t max, SweepCallback* thunk, void* arg);

  // Copy the bits of `source_bitmap` into this bitmap's storage.
  void CopyFrom(SpaceBitmap* source_bitmap);

  // Starting address of our internal storage.
  Atomic<uintptr_t>* Begin() const {
    return bitmap_begin_;
  }

  // Size of our internal storage
  size_t Size() const {
    return bitmap_size_;
  }

  // Size in bytes of the memory that the bitmaps spans.
  uint64_t HeapSize() const {
    return IndexToOffset<uint64_t>(Size() / sizeof(intptr_t));
  }

  // Resize the covered heap range to `bytes` and shrink/grow the bitmap
  // storage (and backing mem map, if any) to match.
  void SetHeapSize(size_t bytes) {
    heap_limit_ = heap_begin_ + bytes;
    bitmap_size_ = ComputeBitmapSize(bytes);
    CHECK_EQ(HeapSize(), bytes);
    if (mem_map_.IsValid()) {
      mem_map_.SetSize(bitmap_size_);
    }
  }

  // The lowest address covered by the bitmap.
  uintptr_t HeapBegin() const {
    return heap_begin_;
  }

  // The maximum address which the bitmap can span. (HeapBegin() <= object < HeapLimit()).
  uint64_t HeapLimit() const {
    return heap_limit_;
  }

  // Set the max address which can covered by the bitmap.
  void SetHeapLimit(uintptr_t new_end);

  std::string GetName() const {
    return name_;
  }

  void SetName(const std::string& name) {
    name_ = name;
  }

  std::string Dump() const;

  // Dump three bitmap words around obj.
  std::string DumpMemAround(mirror::Object* obj) const;

  // Helper function for computing bitmap size based on a 64 bit capacity.
  static size_t ComputeBitmapSize(uint64_t capacity);
  static size_t ComputeHeapSize(uint64_t bitmap_bytes);

  // TODO: heap_end_ is initialized so that the heap bitmap is empty, this doesn't require the -1,
  // however, we document that this is expected on heap_end_

  // Default-constructed bitmaps are invalid (IsValid() is false); movable but
  // not copyable (use CopyView for a non-owning view).
  SpaceBitmap() = default;
  SpaceBitmap(SpaceBitmap&&) noexcept = default;
  SpaceBitmap& operator=(SpaceBitmap&&) noexcept = default;

  bool IsValid() const {
    return bitmap_begin_ != nullptr;
  }

  // Copy a view of the other bitmap without taking ownership of the underlying data.
  // Note: mem_map_ is deliberately not copied, so this view does not own the storage.
  void CopyView(SpaceBitmap& other) {
    bitmap_begin_ = other.bitmap_begin_;
    bitmap_size_ = other.bitmap_size_;
    heap_begin_ = other.heap_begin_;
    heap_limit_ = other.heap_limit_;
    name_ = other.name_;
  }

 private:
  // TODO: heap_end_ is initialized so that the heap bitmap is empty, this doesn't require the -1,
  // however, we document that this is expected on heap_end_
  SpaceBitmap(const std::string& name,
              MemMap&& mem_map,
              uintptr_t* bitmap_begin,
              size_t bitmap_size,
              const void* heap_begin,
              size_t heap_capacity);

  // Change the value of the bit corresponding to `obj` in the bitmap
  // to `kSetBit` and return the previous value of that bit.
  template<bool kSetBit>
  bool Modify(const mirror::Object* obj);

  // Backing storage for bitmap.
  MemMap mem_map_;

  // This bitmap itself, word sized for efficiency in scanning.
  Atomic<uintptr_t>* bitmap_begin_ = nullptr;

  // Size of this bitmap.
  size_t bitmap_size_ = 0u;

  // The start address of the memory covered by the bitmap, which corresponds to the word
  // containing the first bit in the bitmap.
  uintptr_t heap_begin_ = 0u;

  // The end address of the memory covered by the bitmap. This may not be on a word boundary.
  uintptr_t heap_limit_ = 0u;

  // Name of this bitmap.
  std::string name_;
};

// Bitmap over the regular (object-aligned) heap spaces.
using ContinuousSpaceBitmap = SpaceBitmap<kObjectAlignment>;

// We pick the lowest supported page size to ensure that it's a constexpr, so
// that we can keep bitmap accesses optimized. However, this means that when the
// large-object alignment is higher than kMinPageSize, then not all bits in the
// bitmap are actually in use.
// In practice, this happens when running with a kernel that uses 16kB as the
// page size, where 1 out of every 4 bits of the bitmap is used.

// TODO: In the future, we should consider alternative fixed alignments for
// large objects, disassociated from the page size. This would allow us to keep
// accesses optimized, while also packing the bitmap efficiently, and reducing
// its size enough that it would no longer make sense to allocate it with
// mmap().
using LargeObjectBitmap = SpaceBitmap<kMinPageSize>;

template<size_t kAlignment>
std::ostream& operator << (std::ostream& stream, const SpaceBitmap<kAlignment>& bitmap);

}  // namespace accounting
}  // namespace gc
}  // namespace art

#endif  // ART_RUNTIME_GC_ACCOUNTING_SPACE_BITMAP_H_