// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_HEAP_MARK_COMPACT_INL_H_
#define V8_HEAP_MARK_COMPACT_INL_H_

#include "src/base/bits.h"
#include "src/codegen/assembler-inl.h"
#include "src/heap/heap-inl.h"
#include "src/heap/incremental-marking.h"
#include "src/heap/mark-compact.h"
#include "src/heap/marking-worklist-inl.h"
#include "src/heap/marking-worklist.h"
#include "src/heap/objects-visiting-inl.h"
#include "src/heap/remembered-set-inl.h"
#include "src/objects/js-collection-inl.h"
#include "src/objects/js-weak-refs-inl.h"
#include "src/objects/slots-inl.h"
#include "src/objects/transitions.h"

namespace v8 {
namespace internal {

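// Marks |obj| grey if it is still white and pushes it onto the local marking
// worklist. When FLAG_track_retaining_path is set, |host| is recorded as the
// retainer of |obj|.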
void MarkCompactCollector::MarkObject(HeapObject host, HeapObject obj) {
  if (marking_state()->WhiteToGrey(obj)) {
    local_marking_worklists()->Push(obj);
    if (V8_UNLIKELY(FLAG_track_retaining_path)) {
      heap_->AddRetainer(host, obj);
    }
  }
}

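// Same as MarkObject(), except that the retainer recorded for the retaining
// path is a root rather than another heap object.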
void MarkCompactCollector::MarkRootObject(Root root, HeapObject obj) {
  if (marking_state()->WhiteToGrey(obj)) {
    local_marking_worklists()->Push(obj);
    if (V8_UNLIKELY(FLAG_track_retaining_path)) {
      heap_->AddRetainingRoot(root, obj);
    }
  }
}

#ifdef ENABLE_MINOR_MC

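// The minor collector only marks objects residing in the young generation;
// old-generation objects are ignored here.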
void MinorMarkCompactCollector::MarkRootObject(HeapObject obj) {
  if (Heap::InYoungGeneration(obj) &&
      non_atomic_marking_state_.WhiteToGrey(obj)) {
    worklist_->Push(kMainThreadTask, obj);
  }
}

#endif

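// Marks an object that is referenced from outside the V8 heap (wrapper
// tracing); Root::kWrapperTracing is recorded as its retaining root.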
void MarkCompactCollector::MarkExternallyReferencedObject(HeapObject obj) {
  if (marking_state()->WhiteToGrey(obj)) {
    local_marking_worklists()->Push(obj);
    if (V8_UNLIKELY(FLAG_track_retaining_path)) {
      heap_->AddRetainingRoot(Root::kWrapperTracing, obj);
    }
  }
}

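// The RecordSlot() overloads remember an OLD_TO_OLD slot so that it can be
// updated after evacuation. A slot is recorded only if the target lives on an
// evacuation candidate page and the source page does not skip slot recording.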
void MarkCompactCollector::RecordSlot(HeapObject object, ObjectSlot slot,
                                      HeapObject target) {
  RecordSlot(object, HeapObjectSlot(slot), target);
}

void MarkCompactCollector::RecordSlot(HeapObject object, HeapObjectSlot slot,
                                      HeapObject target) {
  BasicMemoryChunk* target_page = BasicMemoryChunk::FromHeapObject(target);
  MemoryChunk* source_page = MemoryChunk::FromHeapObject(object);
  if (target_page->IsEvacuationCandidate<AccessMode::ATOMIC>() &&
      !source_page->ShouldSkipEvacuationSlotRecording<AccessMode::ATOMIC>()) {
    RememberedSet<OLD_TO_OLD>::Insert<AccessMode::ATOMIC>(source_page,
                                                          slot.address());
  }
}

void MarkCompactCollector::RecordSlot(MemoryChunk* source_page,
                                      HeapObjectSlot slot, HeapObject target) {
  BasicMemoryChunk* target_page = BasicMemoryChunk::FromHeapObject(target);
  if (target_page->IsEvacuationCandidate<AccessMode::ATOMIC>()) {
    RememberedSet<OLD_TO_OLD>::Insert<AccessMode::ATOMIC>(source_page,
                                                          slot.address());
  }
}

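// Transition arrays are treated as weak objects; they are collected here and
// processed after marking has finished.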
void MarkCompactCollector::AddTransitionArray(TransitionArray array) {
  weak_objects_.transition_arrays.Push(kMainThreadTask, array);
}

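// Visits the body of a JSObject subclass as described by TBodyDescriptor and
// returns the number of bytes visited, or 0 if ShouldVisit() rejects the
// object (e.g. it has already been visited).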
template <typename MarkingState>
template <typename T, typename TBodyDescriptor>
int MainMarkingVisitor<MarkingState>::VisitJSObjectSubclass(Map map, T object) {
  if (!this->ShouldVisit(object)) return 0;
  this->VisitMapPointer(object);
  int size = TBodyDescriptor::SizeOf(map, object);
  TBodyDescriptor::IterateBody(map, object, size, this);
  return size;
}

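// Left-trimmable arrays can have their start moved by the mutator, so the
// size is derived from the length read here rather than from the body
// descriptor.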
template <typename MarkingState>
template <typename T>
int MainMarkingVisitor<MarkingState>::VisitLeftTrimmableArray(Map map,
                                                              T object) {
  if (!this->ShouldVisit(object)) return 0;
  int size = T::SizeFor(object.length());
  this->VisitMapPointer(object);
  T::BodyDescriptor::IterateBody(map, object, size, this);
  return size;
}

template <typename MarkingState>
template <typename TSlot>
void MainMarkingVisitor<MarkingState>::RecordSlot(HeapObject object, TSlot slot,
                                                  HeapObject target) {
  MarkCompactCollector::RecordSlot(object, slot, target);
}

template <typename MarkingState>
void MainMarkingVisitor<MarkingState>::RecordRelocSlot(Code host,
                                                       RelocInfo* rinfo,
                                                       HeapObject target) {
  MarkCompactCollector::RecordRelocSlot(host, rinfo, target);
}

template <typename MarkingState>
void MainMarkingVisitor<MarkingState>::MarkDescriptorArrayFromWriteBarrier(
    DescriptorArray descriptors, int number_of_own_descriptors) {
  // This is necessary because the Scavenger records slots only for the
  // promoted black objects and the marking visitor of DescriptorArray skips
  // the descriptors marked by the visitor.VisitDescriptors() below.
  this->MarkDescriptorArrayBlack(descriptors);
  this->VisitDescriptors(descriptors, number_of_own_descriptors);
}

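// The iterator walks the mark bitmap of |chunk| starting at |start|. The
// filler maps are cached up front so that black or grey fillers can be
// filtered out during iteration.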
template <LiveObjectIterationMode mode>
LiveObjectRange<mode>::iterator::iterator(const MemoryChunk* chunk,
                                          Bitmap* bitmap, Address start)
    : chunk_(chunk),
      one_word_filler_map_(
          ReadOnlyRoots(chunk->heap()).one_pointer_filler_map()),
      two_word_filler_map_(
          ReadOnlyRoots(chunk->heap()).two_pointer_filler_map()),
      free_space_map_(ReadOnlyRoots(chunk->heap()).free_space_map()),
      it_(chunk, bitmap) {
  it_.Advance(Bitmap::IndexToCell(
      Bitmap::CellAlignIndex(chunk_->AddressToMarkbitIndex(start))));
  if (!it_.Done()) {
    cell_base_ = it_.CurrentCellBase();
    current_cell_ = *it_.CurrentCell();
    AdvanceToNextValidObject();
  }
}

template <LiveObjectIterationMode mode>
typename LiveObjectRange<mode>::iterator& LiveObjectRange<mode>::iterator::
operator++() {
  AdvanceToNextValidObject();
  return *this;
}

template <LiveObjectIterationMode mode>
typename LiveObjectRange<mode>::iterator LiveObjectRange<mode>::iterator::
operator++(int) {
  iterator retval = *this;
  ++(*this);
  return retval;
}

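// Scans the mark bitmap cell by cell for the next live, non-filler object.
// A black object has both its start bit and the following bit set; a grey
// object has only the start bit set. Which objects are reported depends on
// the iteration |mode|.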
template <LiveObjectIterationMode mode>
void LiveObjectRange<mode>::iterator::AdvanceToNextValidObject() {
  while (!it_.Done()) {
    HeapObject object;
    int size = 0;
    while (current_cell_ != 0) {
      uint32_t trailing_zeros = base::bits::CountTrailingZeros(current_cell_);
      Address addr = cell_base_ + trailing_zeros * kTaggedSize;

      // Clear the first bit of the found object.
      current_cell_ &= ~(1u << trailing_zeros);

      uint32_t second_bit_index = 0;
      if (trailing_zeros >= Bitmap::kBitIndexMask) {
        second_bit_index = 0x1;
        // The overlapping case; there has to exist a cell after the current
        // cell.
        // However, if there is a black area at the end of the page, and the
        // last word is a one word filler, we are not allowed to advance. In
        // that case we can return immediately.
        if (!it_.Advance()) {
          DCHECK(HeapObject::FromAddress(addr).map() == one_word_filler_map_);
          current_object_ = HeapObject();
          return;
        }
        cell_base_ = it_.CurrentCellBase();
        current_cell_ = *it_.CurrentCell();
      } else {
        second_bit_index = 1u << (trailing_zeros + 1);
      }

      Map map;
      if (current_cell_ & second_bit_index) {
        // We found a black object. If the black object is within a black area,
        // make sure that we skip all set bits in the black area until the
        // object ends.
        HeapObject black_object = HeapObject::FromAddress(addr);
        Object map_object = ObjectSlot(addr).Acquire_Load();
        CHECK(map_object.IsMap());
        map = Map::cast(map_object);
        size = black_object.SizeFromMap(map);
        CHECK_LE(addr + size, chunk_->area_end());
        Address end = addr + size - kTaggedSize;
        // One word filler objects do not borrow the second mark bit. We have
        // to jump over the advancing and clearing part.
        // Note that we know that we are at a one word filler when
        // object_start + object_size - kTaggedSize == object_start.
        if (addr != end) {
          DCHECK_EQ(chunk_, BasicMemoryChunk::FromAddress(end));
          uint32_t end_mark_bit_index = chunk_->AddressToMarkbitIndex(end);
          unsigned int end_cell_index =
              end_mark_bit_index >> Bitmap::kBitsPerCellLog2;
          MarkBit::CellType end_index_mask =
              1u << Bitmap::IndexInCell(end_mark_bit_index);
          if (it_.Advance(end_cell_index)) {
            cell_base_ = it_.CurrentCellBase();
            current_cell_ = *it_.CurrentCell();
          }

          // Clear all bits in current_cell, including the end index.
          current_cell_ &= ~(end_index_mask + end_index_mask - 1);
        }

        if (mode == kBlackObjects || mode == kAllLiveObjects) {
          object = black_object;
        }
      } else if ((mode == kGreyObjects || mode == kAllLiveObjects)) {
        Object map_object = ObjectSlot(addr).Acquire_Load();
        CHECK(map_object.IsMap());
        map = Map::cast(map_object);
        object = HeapObject::FromAddress(addr);
        size = object.SizeFromMap(map);
        CHECK_LE(addr + size, chunk_->area_end());
      }

      // We found a live object.
      if (!object.is_null()) {
        // Do not use IsFreeSpaceOrFiller() here. This may cause a data race for
        // reading out the instance type when a new map is concurrently written
        // into this object while iterating over the object.
        if (map == one_word_filler_map_ || map == two_word_filler_map_ ||
            map == free_space_map_) {
          // There are two reasons why we can get black or grey fillers:
          // 1) Black areas together with slack tracking may result in black one
          // word filler objects.
          // 2) Left trimming may leave black or grey fillers behind because we
          // do not clear the old location of the object start.
          // We filter these objects out in the iterator.
          object = HeapObject();
        } else {
          break;
        }
      }
    }

    if (current_cell_ == 0) {
      if (it_.Advance()) {
        cell_base_ = it_.CurrentCellBase();
        current_cell_ = *it_.CurrentCell();
      }
    }
    if (!object.is_null()) {
      current_object_ = object;
      current_size_ = size;
      return;
    }
  }
  current_object_ = HeapObject();
}

template <LiveObjectIterationMode mode>
typename LiveObjectRange<mode>::iterator LiveObjectRange<mode>::begin() {
  return iterator(chunk_, bitmap_, start_);
}

template <LiveObjectIterationMode mode>
typename LiveObjectRange<mode>::iterator LiveObjectRange<mode>::end() {
  return iterator(chunk_, bitmap_, end_);
}

Isolate* MarkCompactCollectorBase::isolate() { return heap()->isolate(); }

}  // namespace internal
}  // namespace v8

#endif  // V8_HEAP_MARK_COMPACT_INL_H_