/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_GC_COLLECTOR_MARK_COMPACT_H_
#define ART_RUNTIME_GC_COLLECTOR_MARK_COMPACT_H_

#include <deque>
#include <memory>  // For unique_ptr.

#include "atomic.h"
#include "base/macros.h"
#include "base/mutex.h"
#include "garbage_collector.h"
#include "gc_root.h"
#include "gc/accounting/heap_bitmap.h"
#include "immune_spaces.h"
#include "lock_word.h"
#include "object_callbacks.h"
#include "offsets.h"

namespace art {

class Thread;

namespace mirror {
  class Class;
  class Object;
}  // namespace mirror

namespace gc {

class Heap;

namespace accounting {
  template <typename T> class AtomicStack;
  typedef AtomicStack<mirror::Object> ObjectStack;
}  // namespace accounting

namespace space {
  class BumpPointerSpace;
  class ContinuousMemMapAllocSpace;
  class ContinuousSpace;
}  // namespace space

namespace collector {

class MarkCompact : public GarbageCollector {
 public:
  explicit MarkCompact(Heap* heap, const std::string& name_prefix = "");
  ~MarkCompact() {}

  virtual void RunPhases() OVERRIDE NO_THREAD_SAFETY_ANALYSIS;
  void InitializePhase();
  void MarkingPhase() REQUIRES(Locks::mutator_lock_)
      REQUIRES(!Locks::heap_bitmap_lock_);
  void ReclaimPhase() REQUIRES(Locks::mutator_lock_)
      REQUIRES(!Locks::heap_bitmap_lock_);
  void FinishPhase() REQUIRES(Locks::mutator_lock_);
  void MarkReachableObjects()
      REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_);
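
  // A minimal sketch (an assumption based on the phase methods declared
  // above, not the actual definition in mark_compact.cc) of how RunPhases()
  // might sequence a collection:
  //
  //   void MarkCompact::RunPhases() {
  //     InitializePhase();
  //     {
  //       ScopedPause pause(this);  // Mutator threads are suspended here.
  //       MarkingPhase();
  //       ReclaimPhase();
  //     }
  //     FinishPhase();
  //   }
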
  virtual GcType GetGcType() const OVERRIDE {
    return kGcTypePartial;
  }
  virtual CollectorType GetCollectorType() const OVERRIDE {
    return kCollectorTypeMC;
  }

  // Sets which space we will be copying objects in.
  void SetSpace(space::BumpPointerSpace* space);

  // Initializes internal structures.
  void Init();

  // Find the default mark bitmap.
  void FindDefaultMarkBitmap();

  void ScanObject(mirror::Object* obj)
      REQUIRES(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Marks the root set at the start of a garbage collection.
  void MarkRoots()
      REQUIRES(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Bind the live bits to the mark bits of bitmaps for spaces that are never collected,
  // i.e. the image. Mark that portion of the heap as immune.
  void BindBitmaps() SHARED_REQUIRES(Locks::mutator_lock_)
      REQUIRES(!Locks::heap_bitmap_lock_);

  void UnBindBitmaps()
      REQUIRES(Locks::heap_bitmap_lock_);

  void ProcessReferences(Thread* self)
      REQUIRES(Locks::mutator_lock_);

  // Sweeps unmarked objects to complete the garbage collection.
  void Sweep(bool swap_bitmaps) REQUIRES(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Sweeps unmarked large objects to complete the garbage collection.
  void SweepLargeObjects(bool swap_bitmaps) REQUIRES(Locks::heap_bitmap_lock_);

  void SweepSystemWeaks()
      SHARED_REQUIRES(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  virtual void VisitRoots(mirror::Object*** roots, size_t count, const RootInfo& info)
      OVERRIDE REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_);

  virtual void VisitRoots(mirror::CompressedReference<mirror::Object>** roots, size_t count,
                          const RootInfo& info)
      OVERRIDE REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_);

  // Schedules an unmarked object for reference processing.
  void DelayReferenceReferent(mirror::Class* klass, mirror::Reference* reference)
      SHARED_REQUIRES(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

 protected:
  // Returns null if the object is not marked, otherwise returns the forwarding address (which is
  // the object itself for non-movable things).
  mirror::Object* GetMarkedForwardAddress(mirror::Object* object)
      REQUIRES(Locks::mutator_lock_)
      SHARED_REQUIRES(Locks::heap_bitmap_lock_);
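
  // A minimal sketch (an assumption, not the actual definition) of the
  // lock-word scheme this lookup relies on: during compaction a moving
  // object's forwarding address is stashed in its lock word, so the lookup
  // just decodes that word.
  //
  //   mirror::Object* MarkCompact::GetMarkedForwardAddress(mirror::Object* obj) {
  //     if (IsMarked(obj) == nullptr) {
  //       return nullptr;  // Not marked: no forwarding address.
  //     }
  //     if (!space_->HasAddress(obj)) {
  //       return obj;  // Non-movable objects forward to themselves.
  //     }
  //     // Hypothetical decoding; assumes the forwarding address was stored
  //     // in the lock word by the forwarding pass.
  //     return reinterpret_cast<mirror::Object*>(
  //         obj->GetLockWord(false).ForwardingAddress());
  //   }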

  // Marks a large object.
  bool MarkLargeObject(const mirror::Object* obj)
      REQUIRES(Locks::heap_bitmap_lock_)
      SHARED_REQUIRES(Locks::mutator_lock_);

  // Expand the mark stack to new_size (typically 2x its current size) when it fills up.
  void ResizeMarkStack(size_t new_size) SHARED_REQUIRES(Locks::mutator_lock_);

  // Returns true if we should sweep the space.
  bool ShouldSweepSpace(space::ContinuousSpace* space) const;

  // Push an object onto the mark stack.
  void MarkStackPush(mirror::Object* obj) SHARED_REQUIRES(Locks::mutator_lock_);
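
  // A minimal sketch (an assumption, not the actual definition) of the
  // overflow handling that MarkStackPush() and ResizeMarkStack() provide
  // together:
  //
  //   void MarkCompact::MarkStackPush(mirror::Object* obj) {
  //     if (UNLIKELY(mark_stack_->Size() >= mark_stack_->Capacity())) {
  //       ResizeMarkStack(mark_stack_->Capacity() * 2);  // Double on overflow.
  //     }
  //     mark_stack_->PushBack(obj);  // Cannot fail after the resize.
  //   }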

  void UpdateAndMarkModUnion()
      REQUIRES(Locks::heap_bitmap_lock_)
      SHARED_REQUIRES(Locks::mutator_lock_);

  // Recursively blackens objects on the mark stack.
  void ProcessMarkStack()
      REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_);

  // Compacts the heap using a three-pass mark-compact approach: compute forwarding
  // addresses, update references, then move the objects (see the sketch after the
  // pass declarations below).
  void Compact() REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_);
  // Calculate the forwarding address of objects marked as "live" in the objects_before_forwarding
  // bitmap.
  void CalculateObjectForwardingAddresses()
      REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_);
  // Update the references of objects by using the forwarding addresses.
  void UpdateReferences() REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_);
  // Move objects and restore lock words.
  void MoveObjects() REQUIRES(Locks::mutator_lock_);
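
  // A minimal sketch (an assumption consistent with the declarations above,
  // not the actual definition) of how the three passes compose:
  //
  //   void MarkCompact::Compact() {
  //     CalculateObjectForwardingAddresses();  // Pass 1: assign each live object a new address.
  //     UpdateReferences();                    // Pass 2: rewrite references to the new addresses.
  //     MoveObjects();                         // Pass 3: slide objects and restore lock words.
  //   }
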
  // Move a single object to its forwarding address.
  void MoveObject(mirror::Object* obj, size_t len) REQUIRES(Locks::mutator_lock_);
  // Mark a single object.
  virtual mirror::Object* MarkObject(mirror::Object* obj) OVERRIDE
      REQUIRES(Locks::heap_bitmap_lock_, Locks::mutator_lock_);
  virtual void MarkHeapReference(mirror::HeapReference<mirror::Object>* obj_ptr) OVERRIDE
      REQUIRES(Locks::heap_bitmap_lock_, Locks::mutator_lock_);
  virtual mirror::Object* IsMarked(mirror::Object* obj) OVERRIDE
      SHARED_REQUIRES(Locks::heap_bitmap_lock_)
      REQUIRES(Locks::mutator_lock_);
  virtual bool IsMarkedHeapReference(mirror::HeapReference<mirror::Object>* obj) OVERRIDE
      SHARED_REQUIRES(Locks::heap_bitmap_lock_)
      REQUIRES(Locks::mutator_lock_);
  void ForwardObject(mirror::Object* obj) REQUIRES(Locks::heap_bitmap_lock_,
                                                   Locks::mutator_lock_);
  // Update a single heap reference.
  void UpdateHeapReference(mirror::HeapReference<mirror::Object>* reference)
      SHARED_REQUIRES(Locks::heap_bitmap_lock_)
      REQUIRES(Locks::mutator_lock_);
  // Update all of the references of a single object.
  void UpdateObjectReferences(mirror::Object* obj)
      SHARED_REQUIRES(Locks::heap_bitmap_lock_)
      REQUIRES(Locks::mutator_lock_);

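  // A minimal sketch (an assumption, not the actual definition) of how
  // UpdateHeapReference() might rewrite one slot using forwarding addresses:
  //
  //   void MarkCompact::UpdateHeapReference(mirror::HeapReference<mirror::Object>* reference) {
  //     mirror::Object* obj = reference->AsMirrorPtr();
  //     if (obj != nullptr) {
  //       mirror::Object* new_obj = GetMarkedForwardAddress(obj);
  //       if (new_obj != nullptr) {
  //         reference->Assign(new_obj);  // Point the slot at the post-compaction address.
  //       }
  //     }
  //   }
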
  // Revoke all the thread-local buffers.
  void RevokeAllThreadLocalBuffers();

  accounting::ObjectStack* mark_stack_;

  // Every object inside the immune spaces is assumed to be marked.
  ImmuneSpaces immune_spaces_;

  // Bump pointer space which we are collecting.
  space::BumpPointerSpace* space_;
  // Cached mark bitmap as an optimization.
  accounting::HeapBitmap* mark_bitmap_;

  // The name of the collector.
  std::string collector_name_;

  // The bump pointer in the space where the next forwarding address will be.
  uint8_t* bump_pointer_;
  // How many live objects we have in the space.
  size_t live_objects_in_space_;

  // Bitmap which describes which objects we have to move; its granularity needs to be
  // divided by 2 so that we can handle objects which are only 8 bytes long.
  std::unique_ptr<accounting::ContinuousSpaceBitmap> objects_before_forwarding_;
  // Bitmap which describes which lock words we need to restore.
  std::unique_ptr<accounting::ContinuousSpaceBitmap> objects_with_lockword_;
  // Which lock words we need to restore as we are moving objects.
  std::deque<LockWord> lock_words_to_restore_;

  // Whether or not we are currently updating references.
  bool updating_references_;

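  // A minimal sketch (an assumption, not the actual definition) of how
  // MoveObject() might consume the fields above: copy the object to the
  // forwarding address stored in its lock word, then restore any displaced
  // lock word recorded in lock_words_to_restore_:
  //
  //   void MarkCompact::MoveObject(mirror::Object* obj, size_t len) {
  //     uintptr_t dest = obj->GetLockWord(false).ForwardingAddress();
  //     memmove(reinterpret_cast<void*>(dest), obj, len);  // Ranges may overlap.
  //     if (objects_with_lockword_->Test(obj)) {
  //       // Put back the original lock word that the forwarding address displaced.
  //       reinterpret_cast<mirror::Object*>(dest)->SetLockWord(
  //           lock_words_to_restore_.front(), /*as_volatile=*/false);
  //       lock_words_to_restore_.pop_front();
  //     }
  //   }
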
 private:
  friend class BitmapSetSlowPathVisitor;
  friend class CalculateObjectForwardingAddressVisitor;
  friend class MarkCompactMarkObjectVisitor;
  friend class MoveObjectVisitor;
  friend class UpdateObjectReferencesVisitor;
  friend class UpdateReferenceVisitor;
  friend class UpdateRootVisitor;

  DISALLOW_IMPLICIT_CONSTRUCTORS(MarkCompact);
};
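
// A hypothetical usage sketch (names and call sequence are assumptions; the
// real driver lives in Heap): construct the collector once, point it at the
// bump pointer space to compact, then run a collection cycle.
//
//   collector::MarkCompact mark_compact(heap, "mc ");
//   mark_compact.SetSpace(bump_pointer_space);
//   mark_compact.Run(kGcCauseCollectorTransition, /*clear_soft_references=*/false);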

}  // namespace collector
}  // namespace gc
}  // namespace art

#endif  // ART_RUNTIME_GC_COLLECTOR_MARK_COMPACT_H_