/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_GC_COLLECTOR_MARK_COMPACT_H_
#define ART_RUNTIME_GC_COLLECTOR_MARK_COMPACT_H_

#include <deque>
#include <memory>  // For unique_ptr.

#include "atomic.h"
#include "base/macros.h"
#include "base/mutex.h"
#include "garbage_collector.h"
#include "gc/accounting/heap_bitmap.h"
#include "immune_region.h"
#include "lock_word.h"
#include "object_callbacks.h"
#include "offsets.h"

namespace art {

class Thread;

namespace mirror {
  class Class;
  class Object;
}  // namespace mirror

namespace gc {

class Heap;

namespace accounting {
  template <typename T> class AtomicStack;
  typedef AtomicStack<mirror::Object*> ObjectStack;
}  // namespace accounting

namespace space {
  class BumpPointerSpace;
  class ContinuousMemMapAllocSpace;
  class ContinuousSpace;
}  // namespace space

namespace collector {

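// Mark-compact collector for the bump pointer space: marks the live objects, computes a
// forwarding address for each of them, updates all references, then slides the objects down
// to compact the space (see Compact() below).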
class MarkCompact : public GarbageCollector {
 public:
  explicit MarkCompact(Heap* heap, const std::string& name_prefix = "");
  ~MarkCompact() {}

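  // Entry point for the collection: runs the initialize, marking, reclaim and finish phases
  // declared below. Thread safety analysis is disabled because the phases acquire and release
  // the required locks themselves.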
  virtual void RunPhases() OVERRIDE NO_THREAD_SAFETY_ANALYSIS;
  void InitializePhase();
  void MarkingPhase() EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_)
      LOCKS_EXCLUDED(Locks::heap_bitmap_lock_);
  void ReclaimPhase() EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_)
      LOCKS_EXCLUDED(Locks::heap_bitmap_lock_);
  void FinishPhase() EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_);
  void MarkReachableObjects()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_);
  virtual GcType GetGcType() const OVERRIDE {
    return kGcTypePartial;
  }
  virtual CollectorType GetCollectorType() const OVERRIDE {
    return kCollectorTypeMC;
  }

  // Sets the space in which objects will be compacted.
  void SetSpace(space::BumpPointerSpace* space);

  // Initializes internal structures.
  void Init();

  // Find the default mark bitmap.
  void FindDefaultMarkBitmap();

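  // Visit the references of a single object, marking any unmarked referents and pushing them
  // on the mark stack.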
  void ScanObject(mirror::Object* obj)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Marks the root set at the start of a garbage collection.
  void MarkRoots()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Bind the live bits to the mark bits of bitmaps for spaces that are never collected, i.e.
  // the image. Mark that portion of the heap as immune.
  void BindBitmaps() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      LOCKS_EXCLUDED(Locks::heap_bitmap_lock_);

  void UnBindBitmaps()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  void ProcessReferences(Thread* self)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Sweeps unmarked objects to complete the garbage collection.
  void Sweep(bool swap_bitmaps) EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Sweeps unmarked objects in the large object space to complete the garbage collection.
  void SweepLargeObjects(bool swap_bitmaps) EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

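  // Sweep the runtime's weak references (e.g. the intern table and JNI weak globals), clearing
  // entries whose referents are not marked.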
  void SweepSystemWeaks()
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

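  // Static callbacks used by the runtime during root visiting and reference processing; 'arg'
  // is the MarkCompact instance.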
  static void MarkRootCallback(mirror::Object** root, void* arg, uint32_t /*tid*/,
                               RootType /*root_type*/)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  static mirror::Object* MarkObjectCallback(mirror::Object* root, void* arg)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  static void MarkHeapReferenceCallback(mirror::HeapReference<mirror::Object>* obj_ptr, void* arg)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  static bool HeapReferenceMarkedCallback(mirror::HeapReference<mirror::Object>* ref_ptr,
                                          void* arg)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  static void ProcessMarkStackCallback(void* arg)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_);

  static void DelayReferenceReferentCallback(mirror::Class* klass, mirror::Reference* ref,
                                             void* arg)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Schedules an unmarked object for reference processing.
  void DelayReferenceReferent(mirror::Class* klass, mirror::Reference* reference)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

 protected:
  // Returns null if the object is not marked, otherwise returns the forwarding address (the
  // same as the object itself for non-movable objects).
  mirror::Object* GetMarkedForwardAddress(mirror::Object* object) const
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  static mirror::Object* MarkedForwardingAddressCallback(mirror::Object* object, void* arg)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Marks a large object, returning true if the object was not already marked.
  bool MarkLargeObject(const mirror::Object* obj)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Expand the mark stack to new_size; callers pass 2x the current size when the stack
  // overflows.
  void ResizeMarkStack(size_t new_size);

  // Returns true if we should sweep the space.
  bool ShouldSweepSpace(space::ContinuousSpace* space) const;

  // Push an object onto the mark stack.
  void MarkStackPush(mirror::Object* obj);

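  // Update the mod union tables for the immune spaces and mark the objects they reference.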
  void UpdateAndMarkModUnion()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Recursively blackens objects on the mark stack.
  void ProcessMarkStack()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_);

  // Compact the space using a 3-pass approach: calculate forwarding addresses, update
  // references, then move the objects and restore their lock words.
  void Compact() EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_);
  // Calculate the forwarding address of objects marked as "live" in the
  // objects_before_forwarding_ bitmap.
  void CalculateObjectForwardingAddresses()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_);
  // Update the references of objects by using the forwarding addresses.
  void UpdateReferences() EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_);
  static void UpdateRootCallback(mirror::Object** root, void* arg, uint32_t /*thread_id*/,
                                 RootType /*root_type*/)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
  // Move objects and restore lock words.
  void MoveObjects() EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_);
  // Move a single object to its forwarding address.
  void MoveObject(mirror::Object* obj, size_t len) EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_);
  // Mark a single object.
  void MarkObject(mirror::Object* obj) EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_,
                                                                Locks::mutator_lock_);
  bool IsMarked(const mirror::Object* obj) const
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
  static mirror::Object* IsMarkedCallback(mirror::Object* object, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
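  // Store the forwarding address of obj in its lock word, saving the original lock word for
  // later restoration (see lock_words_to_restore_ below).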
  void ForwardObject(mirror::Object* obj) EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_,
                                                                   Locks::mutator_lock_);
  // Update a single heap reference so that it points at the referent's forwarding address.
  void UpdateHeapReference(mirror::HeapReference<mirror::Object>* reference)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_);
  static void UpdateHeapReferenceCallback(mirror::HeapReference<mirror::Object>* reference,
                                          void* arg)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_);
  // Update all of the references of a single object.
  void UpdateObjectReferences(mirror::Object* obj)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Revoke all the thread-local buffers.
  void RevokeAllThreadLocalBuffers();

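  // Stack of gray objects: objects which have been marked but whose references have not yet
  // been scanned.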
  accounting::ObjectStack* mark_stack_;

  // Immune region; every object inside it is assumed to be marked.
  ImmuneRegion immune_region_;

  // Bump pointer space which we are collecting.
  space::BumpPointerSpace* space_;
  // Cached mark bitmap as an optimization.
  accounting::HeapBitmap* mark_bitmap_;

  // The name of the collector.
  std::string collector_name_;

  // The bump pointer in the space where the next forwarding address will be.
  byte* bump_pointer_;
  // How many live objects we have in the space.
  size_t live_objects_in_space_;

  // Bitmap which describes which objects we have to move; its granularity is divided by 2 so
  // that we can handle objects which are only 8 bytes long.
  std::unique_ptr<accounting::ContinuousSpaceBitmap> objects_before_forwarding_;
  // Bitmap which describes which objects' lock words we need to restore.
  std::unique_ptr<accounting::ContinuousSpaceBitmap> objects_with_lockword_;
  // The lock words we need to restore as we move objects.
  std::deque<LockWord> lock_words_to_restore_;

 private:
  friend class BitmapSetSlowPathVisitor;
  friend class CalculateObjectForwardingAddressVisitor;
  friend class MarkCompactMarkObjectVisitor;
  friend class MoveObjectVisitor;
  friend class UpdateObjectReferencesVisitor;
  friend class UpdateReferenceVisitor;
  DISALLOW_COPY_AND_ASSIGN(MarkCompact);
};

}  // namespace collector
}  // namespace gc
}  // namespace art

#endif  // ART_RUNTIME_GC_COLLECTOR_MARK_COMPACT_H_