• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #ifndef V8_HEAP_MARK_COMPACT_INL_H_
6 #define V8_HEAP_MARK_COMPACT_INL_H_
7 
8 #include "src/base/bits.h"
9 #include "src/heap/mark-compact.h"
10 #include "src/heap/objects-visiting-inl.h"
11 #include "src/heap/remembered-set.h"
12 #include "src/objects/js-collection-inl.h"
13 
14 namespace v8 {
15 namespace internal {
16 
template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
MarkingVisitor<fixed_array_mode, retaining_path_mode,
               MarkingState>::MarkingVisitor(MarkCompactCollector* collector,
                                             MarkingState* marking_state)
    // Cache the heap pointer from the collector so visit methods do not have
    // to go through |collector_| on every access.
    : heap_(collector->heap()),
      collector_(collector),
      marking_state_(marking_state) {}
25 
26 template <FixedArrayVisitationMode fixed_array_mode,
27           TraceRetainingPathMode retaining_path_mode, typename MarkingState>
28 int MarkingVisitor<fixed_array_mode, retaining_path_mode,
VisitAllocationSite(Map * map,AllocationSite * object)29                    MarkingState>::VisitAllocationSite(Map* map,
30                                                       AllocationSite* object) {
31   int size = AllocationSite::BodyDescriptorWeak::SizeOf(map, object);
32   AllocationSite::BodyDescriptorWeak::IterateBody(map, object, size, this);
33   return size;
34 }
35 
36 template <FixedArrayVisitationMode fixed_array_mode,
37           TraceRetainingPathMode retaining_path_mode, typename MarkingState>
38 int MarkingVisitor<fixed_array_mode, retaining_path_mode,
VisitBytecodeArray(Map * map,BytecodeArray * array)39                    MarkingState>::VisitBytecodeArray(Map* map,
40                                                      BytecodeArray* array) {
41   int size = BytecodeArray::BodyDescriptor::SizeOf(map, array);
42   BytecodeArray::BodyDescriptor::IterateBody(map, array, size, this);
43   array->MakeOlder();
44   return size;
45 }
46 
47 template <FixedArrayVisitationMode fixed_array_mode,
48           TraceRetainingPathMode retaining_path_mode, typename MarkingState>
49 int MarkingVisitor<fixed_array_mode, retaining_path_mode, MarkingState>::
VisitCodeDataContainer(Map * map,CodeDataContainer * object)50     VisitCodeDataContainer(Map* map, CodeDataContainer* object) {
51   int size = CodeDataContainer::BodyDescriptorWeak::SizeOf(map, object);
52   CodeDataContainer::BodyDescriptorWeak::IterateBody(map, object, size, this);
53   return size;
54 }
55 
56 template <FixedArrayVisitationMode fixed_array_mode,
57           TraceRetainingPathMode retaining_path_mode, typename MarkingState>
58 int MarkingVisitor<fixed_array_mode, retaining_path_mode,
VisitFixedArray(Map * map,FixedArray * object)59                    MarkingState>::VisitFixedArray(Map* map,
60                                                   FixedArray* object) {
61   return (fixed_array_mode == FixedArrayVisitationMode::kRegular)
62              ? Parent::VisitFixedArray(map, object)
63              : VisitFixedArrayIncremental(map, object);
64 }
65 
template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode,
                   MarkingState>::VisitJSApiObject(Map* map, JSObject* object) {
  // If an embedder heap tracer is active, report the object as a possible
  // wrapper so the embedder can trace the C++ side of the reference.
  if (heap_->local_embedder_heap_tracer()->InUse()) {
    DCHECK(object->IsJSObject());
    heap_->TracePossibleWrapper(object);
  }
  int size = JSObject::BodyDescriptor::SizeOf(map, object);
  JSObject::BodyDescriptor::IterateBody(map, object, size, this);
  return size;
}
78 
79 template <FixedArrayVisitationMode fixed_array_mode,
80           TraceRetainingPathMode retaining_path_mode, typename MarkingState>
81 int MarkingVisitor<fixed_array_mode, retaining_path_mode,
VisitJSFunction(Map * map,JSFunction * object)82                    MarkingState>::VisitJSFunction(Map* map,
83                                                   JSFunction* object) {
84   int size = JSFunction::BodyDescriptorWeak::SizeOf(map, object);
85   JSFunction::BodyDescriptorWeak::IterateBody(map, object, size, this);
86   return size;
87 }
88 
template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode, MarkingState>::
    VisitEphemeronHashTable(Map* map, EphemeronHashTable* table) {
  // Ephemeron semantics: an entry's value is kept alive only if its key is
  // alive. Register the table so the collector can reprocess entries once
  // key liveness is fully known.
  collector_->AddEphemeronHashTable(table);

  for (int i = 0; i < table->Capacity(); i++) {
    Object** key_slot =
        table->RawFieldOfElementAt(EphemeronHashTable::EntryToIndex(i));
    HeapObject* key = HeapObject::cast(table->KeyAt(i));
    // Record the key slot for pointer updating, but do not mark the key --
    // the table holds its keys weakly.
    collector_->RecordSlot(table, key_slot, key);

    Object** value_slot =
        table->RawFieldOfElementAt(EphemeronHashTable::EntryToValueIndex(i));

    if (marking_state()->IsBlackOrGrey(key)) {
      // The key is already live, so the value is a strong reference.
      VisitPointer(table, value_slot);

    } else {
      // Key liveness is unknown; record the value slot without marking.
      Object* value_obj = *value_slot;

      if (value_obj->IsHeapObject()) {
        HeapObject* value = HeapObject::cast(value_obj);
        collector_->RecordSlot(table, value_slot, value);

        // Revisit ephemerons with both key and value unreachable at end
        // of concurrent marking cycle.
        if (marking_state()->IsWhite(value)) {
          collector_->AddEphemeron(key, value);
        }
      }
    }
  }

  return table->SizeFromMap(map);
}
125 
template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode,
                   MarkingState>::VisitMap(Map* map, Map* object) {
  // When map collection is enabled we have to mark through map's transitions
  // and back pointers in a special way to make these links weak.
  int size = Map::BodyDescriptor::SizeOf(map, object);
  if (object->CanTransition()) {
    // Transitioning maps need their descriptor array marked selectively;
    // MarkMapContents handles that and the map's other pointer fields.
    MarkMapContents(object);
  } else {
    // Non-transitioning maps can be iterated like ordinary objects.
    Map::BodyDescriptor::IterateBody(map, object, size, this);
  }
  return size;
}
140 
141 template <FixedArrayVisitationMode fixed_array_mode,
142           TraceRetainingPathMode retaining_path_mode, typename MarkingState>
143 int MarkingVisitor<fixed_array_mode, retaining_path_mode,
VisitNativeContext(Map * map,Context * context)144                    MarkingState>::VisitNativeContext(Map* map,
145                                                      Context* context) {
146   int size = Context::BodyDescriptorWeak::SizeOf(map, context);
147   Context::BodyDescriptorWeak::IterateBody(map, context, size, this);
148   return size;
149 }
150 
151 template <FixedArrayVisitationMode fixed_array_mode,
152           TraceRetainingPathMode retaining_path_mode, typename MarkingState>
153 int MarkingVisitor<fixed_array_mode, retaining_path_mode,
VisitTransitionArray(Map * map,TransitionArray * array)154                    MarkingState>::VisitTransitionArray(Map* map,
155                                                        TransitionArray* array) {
156   int size = TransitionArray::BodyDescriptor::SizeOf(map, array);
157   TransitionArray::BodyDescriptor::IterateBody(map, array, size, this);
158   collector_->AddTransitionArray(array);
159   return size;
160 }
161 
template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
void MarkingVisitor<fixed_array_mode, retaining_path_mode,
                    MarkingState>::VisitPointer(HeapObject* host, Object** p) {
  // Smis and other non-heap values need no marking or slot recording.
  if (!(*p)->IsHeapObject()) return;
  HeapObject* target_object = HeapObject::cast(*p);
  // Record the slot (for evacuation/pointer updating) before marking.
  collector_->RecordSlot(host, p, target_object);
  MarkObject(host, target_object);
}
171 
template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
void MarkingVisitor<fixed_array_mode, retaining_path_mode,
                    MarkingState>::VisitPointer(HeapObject* host,
                                                MaybeObject** p) {
  HeapObject* target_object;
  if ((*p)->ToStrongHeapObject(&target_object)) {
    // Strong reference: record the slot and mark the target.
    collector_->RecordSlot(host, reinterpret_cast<HeapObjectReference**>(p),
                           target_object);
    MarkObject(host, target_object);
  } else if ((*p)->ToWeakHeapObject(&target_object)) {
    if (marking_state()->IsBlackOrGrey(target_object)) {
      // Weak references with live values are directly processed here to reduce
      // the processing time of weak cells during the main GC pause.
      collector_->RecordSlot(host, reinterpret_cast<HeapObjectReference**>(p),
                             target_object);
    } else {
      // If we do not know about liveness of values of weak cells, we have to
      // process them when we know the liveness of the whole transitive
      // closure.
      collector_->AddWeakReference(host,
                                   reinterpret_cast<HeapObjectReference**>(p));
    }
  }
  // Cleared weak references (neither strong nor weak heap object) are ignored.
}
197 
198 template <FixedArrayVisitationMode fixed_array_mode,
199           TraceRetainingPathMode retaining_path_mode, typename MarkingState>
200 void MarkingVisitor<fixed_array_mode, retaining_path_mode,
VisitPointers(HeapObject * host,Object ** start,Object ** end)201                     MarkingState>::VisitPointers(HeapObject* host,
202                                                  Object** start, Object** end) {
203   for (Object** p = start; p < end; p++) {
204     VisitPointer(host, p);
205   }
206 }
207 
208 template <FixedArrayVisitationMode fixed_array_mode,
209           TraceRetainingPathMode retaining_path_mode, typename MarkingState>
210 void MarkingVisitor<fixed_array_mode, retaining_path_mode,
VisitPointers(HeapObject * host,MaybeObject ** start,MaybeObject ** end)211                     MarkingState>::VisitPointers(HeapObject* host,
212                                                  MaybeObject** start,
213                                                  MaybeObject** end) {
214   for (MaybeObject** p = start; p < end; p++) {
215     VisitPointer(host, p);
216   }
217 }
218 
template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
void MarkingVisitor<fixed_array_mode, retaining_path_mode,
                    MarkingState>::VisitEmbeddedPointer(Code* host,
                                                        RelocInfo* rinfo) {
  DCHECK(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
  HeapObject* object = HeapObject::cast(rinfo->target_object());
  // Always record the reloc slot so the pointer can be updated on evacuation.
  collector_->RecordRelocSlot(host, rinfo, object);
  if (!host->IsWeakObject(object)) {
    // Strongly embedded object: mark it now.
    MarkObject(host, object);
  } else if (!marking_state()->IsBlackOrGrey(object)) {
    // Weakly embedded and not yet marked: defer to weak-object processing.
    collector_->AddWeakObjectInCode(object, host);
  }
}
233 
234 template <FixedArrayVisitationMode fixed_array_mode,
235           TraceRetainingPathMode retaining_path_mode, typename MarkingState>
236 void MarkingVisitor<fixed_array_mode, retaining_path_mode,
VisitCodeTarget(Code * host,RelocInfo * rinfo)237                     MarkingState>::VisitCodeTarget(Code* host,
238                                                    RelocInfo* rinfo) {
239   DCHECK(RelocInfo::IsCodeTargetMode(rinfo->rmode()));
240   Code* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
241   collector_->RecordRelocSlot(host, rinfo, target);
242   MarkObject(host, target);
243 }
244 
template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
bool MarkingVisitor<fixed_array_mode, retaining_path_mode,
                    MarkingState>::MarkObjectWithoutPush(HeapObject* host,
                                                         HeapObject* object) {
  // Transition white -> black directly, without pushing onto the marking
  // worklist; the caller is responsible for visiting the object's body.
  // Returns true only on the first successful transition.
  if (marking_state()->WhiteToBlack(object)) {
    if (retaining_path_mode == TraceRetainingPathMode::kEnabled &&
        V8_UNLIKELY(FLAG_track_retaining_path)) {
      heap_->AddRetainer(host, object);
    }
    return true;
  }
  return false;
}
259 
template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
void MarkingVisitor<fixed_array_mode, retaining_path_mode,
                    MarkingState>::MarkObject(HeapObject* host,
                                              HeapObject* object) {
  // Transition white -> grey and push onto the marking worklist so the
  // object's body gets visited later. Already-marked objects are skipped.
  if (marking_state()->WhiteToGrey(object)) {
    marking_worklist()->Push(object);
    if (retaining_path_mode == TraceRetainingPathMode::kEnabled &&
        V8_UNLIKELY(FLAG_track_retaining_path)) {
      heap_->AddRetainer(host, object);
    }
  }
}
273 
// Incremental visitation of a FixedArray: large arrays on pages with a
// progress bar are scanned one chunk at a time so incremental marking can
// yield between chunks; all other arrays are scanned in one go.
template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode, MarkingState>::
    VisitFixedArrayIncremental(Map* map, FixedArray* object) {
  MemoryChunk* chunk = MemoryChunk::FromAddress(object->address());
  int object_size = FixedArray::BodyDescriptor::SizeOf(map, object);
  if (chunk->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR)) {
    DCHECK(!FLAG_use_marking_progress_bar ||
           chunk->owner()->identity() == LO_SPACE);
    // When using a progress bar for large fixed arrays, scan only a chunk of
    // the array and try to push it onto the marking deque again until it is
    // fully scanned. Fall back to scanning it through to the end in case this
    // fails because of a full deque.
    int start_offset =
        Max(FixedArray::BodyDescriptor::kStartOffset, chunk->progress_bar());
    if (start_offset < object_size) {
      // Ensure that the object is either grey or black before pushing it
      // into marking worklist.
      marking_state()->WhiteToGrey(object);
      if (FLAG_concurrent_marking) {
        marking_worklist()->PushBailout(object);
      } else {
        marking_worklist()->Push(object);
      }
      DCHECK(marking_state()->IsGrey(object) ||
             marking_state()->IsBlack(object));

      int end_offset =
          Min(object_size, start_offset + kProgressBarScanningChunk);
      int already_scanned_offset = start_offset;
      // Scan exactly one chunk of slots; the remainder is picked up the next
      // time this object is popped from the worklist.
      VisitPointers(object, HeapObject::RawField(object, start_offset),
                    HeapObject::RawField(object, end_offset));
      start_offset = end_offset;
      end_offset = Min(object_size, end_offset + kProgressBarScanningChunk);
      chunk->set_progress_bar(start_offset);
      if (start_offset < object_size) {
        // Tell incremental marking how much of the object is still unscanned
        // so its pacing stays accurate.
        heap_->incremental_marking()->NotifyIncompleteScanOfObject(
            object_size - (start_offset - already_scanned_offset));
      }
    }
  } else {
    FixedArray::BodyDescriptor::IterateBody(map, object, object_size, this);
  }
  return object_size;
}
319 
template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
void MarkingVisitor<fixed_array_mode, retaining_path_mode,
                    MarkingState>::MarkMapContents(Map* map) {
  // Since descriptor arrays are potentially shared, ensure that only the
  // descriptors that belong to this map are marked. The first time a non-empty
  // descriptor array is marked, its header is also visited. The slot holding
  // the descriptor array will be implicitly recorded when the pointer fields of
  // this map are visited.  Prototype maps don't keep track of transitions, so
  // just mark the entire descriptor array.
  if (!map->is_prototype_map()) {
    DescriptorArray* descriptors = map->instance_descriptors();
    // MarkObjectWithoutPush returns true only the first time the array is
    // marked; only then visit its header slots.
    if (MarkObjectWithoutPush(map, descriptors) && descriptors->length() > 0) {
      VisitPointers(descriptors, descriptors->GetFirstElementAddress(),
                    descriptors->GetDescriptorEndSlot(0));
    }
    // Visit only the descriptors this map owns; shared tails belong to
    // other maps and are marked when those maps are visited.
    int start = 0;
    int end = map->NumberOfOwnDescriptors();
    if (start < end) {
      VisitPointers(descriptors, descriptors->GetDescriptorStartSlot(start),
                    descriptors->GetDescriptorEndSlot(end));
    }
  }

  // Mark the pointer fields of the Map. Since the transitions array has
  // been marked already, it is fine that one of these fields contains a
  // pointer to it.
  Map::BodyDescriptor::IterateBody(
      map->map(), map, Map::BodyDescriptor::SizeOf(map->map(), map), this);
}
350 
// Marks |obj| grey and pushes it onto the marking worklist; no-op if it is
// already grey or black. |host| is only used for retaining-path tracing.
void MarkCompactCollector::MarkObject(HeapObject* host, HeapObject* obj) {
  if (marking_state()->WhiteToGrey(obj)) {
    marking_worklist()->Push(obj);
    if (V8_UNLIKELY(FLAG_track_retaining_path)) {
      heap_->AddRetainer(host, obj);
    }
  }
}
359 
// Marks an object reachable directly from a root; records the root (rather
// than a retaining object) when retaining-path tracking is enabled.
void MarkCompactCollector::MarkRootObject(Root root, HeapObject* obj) {
  if (marking_state()->WhiteToGrey(obj)) {
    marking_worklist()->Push(obj);
    if (V8_UNLIKELY(FLAG_track_retaining_path)) {
      heap_->AddRetainingRoot(root, obj);
    }
  }
}
368 
369 #ifdef ENABLE_MINOR_MC
370 
// Minor (young-generation) MC only traces new-space objects; everything else
// is ignored. Uses the non-atomic marking state on the main thread.
void MinorMarkCompactCollector::MarkRootObject(HeapObject* obj) {
  if (Heap::InNewSpace(obj) && non_atomic_marking_state_.WhiteToGrey(obj)) {
    worklist_->Push(kMainThread, obj);
  }
}
376 
377 #endif
378 
// Marks an object reported as reachable by the embedder (wrapper tracing);
// such objects are attributed to the kWrapperTracing root.
void MarkCompactCollector::MarkExternallyReferencedObject(HeapObject* obj) {
  if (marking_state()->WhiteToGrey(obj)) {
    marking_worklist()->Push(obj);
    if (V8_UNLIKELY(FLAG_track_retaining_path)) {
      heap_->AddRetainingRoot(Root::kWrapperTracing, obj);
    }
  }
}
387 
// Convenience overload for strong Object** slots; forwards to the
// HeapObjectReference** version.
void MarkCompactCollector::RecordSlot(HeapObject* object, Object** slot,
                                      HeapObject* target) {
  RecordSlot(object, reinterpret_cast<HeapObjectReference**>(slot), target);
}
392 
// Records |slot| in the OLD_TO_OLD remembered set if the target lives on an
// evacuation candidate page (so the slot can be updated after the target
// moves), unless the source page opts out of slot recording.
void MarkCompactCollector::RecordSlot(HeapObject* object,
                                      HeapObjectReference** slot,
                                      HeapObject* target) {
  Page* target_page = Page::FromAddress(reinterpret_cast<Address>(target));
  Page* source_page = Page::FromAddress(reinterpret_cast<Address>(object));
  if (target_page->IsEvacuationCandidate<AccessMode::ATOMIC>() &&
      !source_page->ShouldSkipEvacuationSlotRecording<AccessMode::ATOMIC>()) {
    RememberedSet<OLD_TO_OLD>::Insert(source_page,
                                      reinterpret_cast<Address>(slot));
  }
}
404 
template <LiveObjectIterationMode mode>
LiveObjectRange<mode>::iterator::iterator(MemoryChunk* chunk, Bitmap* bitmap,
                                          Address start)
    : chunk_(chunk),
      // Cache the filler/free-space maps so AdvanceToNextValidObject can
      // filter fillers without re-reading the roots on every step.
      one_word_filler_map_(
          ReadOnlyRoots(chunk->heap()).one_pointer_filler_map()),
      two_word_filler_map_(
          ReadOnlyRoots(chunk->heap()).two_pointer_filler_map()),
      free_space_map_(ReadOnlyRoots(chunk->heap()).free_space_map()),
      it_(chunk, bitmap) {
  // Position the cell iterator at the mark-bit cell containing |start|, then
  // advance to the first live non-filler object at or after it.
  it_.Advance(Bitmap::IndexToCell(
      Bitmap::CellAlignIndex(chunk_->AddressToMarkbitIndex(start))));
  if (!it_.Done()) {
    cell_base_ = it_.CurrentCellBase();
    current_cell_ = *it_.CurrentCell();
    AdvanceToNextValidObject();
  } else {
    // No cells remain: this is an end iterator.
    current_object_ = nullptr;
  }
}
425 
// Pre-increment: advance to the next live object (or the end sentinel).
template <LiveObjectIterationMode mode>
typename LiveObjectRange<mode>::iterator& LiveObjectRange<mode>::iterator::
operator++() {
  AdvanceToNextValidObject();
  return *this;
}
432 
433 template <LiveObjectIterationMode mode>
434 typename LiveObjectRange<mode>::iterator LiveObjectRange<mode>::iterator::
435 operator++(int) {
436   iterator retval = *this;
437   ++(*this);
438   return retval;
439 }
440 
// Scans the mark bitmap cell by cell for the next live object matching the
// iteration mode, skipping fillers. Sets current_object_/current_size_ on
// success, or current_object_ = nullptr when the chunk is exhausted.
template <LiveObjectIterationMode mode>
void LiveObjectRange<mode>::iterator::AdvanceToNextValidObject() {
  while (!it_.Done()) {
    HeapObject* object = nullptr;
    int size = 0;
    // Consume set bits in the current cell until a live object is found.
    while (current_cell_ != 0) {
      uint32_t trailing_zeros = base::bits::CountTrailingZeros(current_cell_);
      Address addr = cell_base_ + trailing_zeros * kPointerSize;

      // Clear the first bit of the found object..
      current_cell_ &= ~(1u << trailing_zeros);

      // Black objects use two consecutive mark bits; locate the second one,
      // which may overlap into the next cell.
      uint32_t second_bit_index = 0;
      if (trailing_zeros >= Bitmap::kBitIndexMask) {
        second_bit_index = 0x1;
        // The overlapping case; there has to exist a cell after the current
        // cell.
        // However, if there is a black area at the end of the page, and the
        // last word is a one word filler, we are not allowed to advance. In
        // that case we can return immediately.
        if (!it_.Advance()) {
          DCHECK(HeapObject::FromAddress(addr)->map() == one_word_filler_map_);
          current_object_ = nullptr;
          return;
        }
        cell_base_ = it_.CurrentCellBase();
        current_cell_ = *it_.CurrentCell();
      } else {
        second_bit_index = 1u << (trailing_zeros + 1);
      }

      Map* map = nullptr;
      if (current_cell_ & second_bit_index) {
        // We found a black object. If the black object is within a black area,
        // make sure that we skip all set bits in the black area until the
        // object ends.
        HeapObject* black_object = HeapObject::FromAddress(addr);
        // Relaxed atomic load: the map word may be written concurrently.
        map =
            base::AsAtomicPointer::Relaxed_Load(reinterpret_cast<Map**>(addr));
        size = black_object->SizeFromMap(map);
        Address end = addr + size - kPointerSize;
        // One word filler objects do not borrow the second mark bit. We have
        // to jump over the advancing and clearing part.
        // Note that we know that we are at a one word filler when
        // object_start + object_size - kPointerSize == object_start.
        if (addr != end) {
          DCHECK_EQ(chunk_, MemoryChunk::FromAddress(end));
          uint32_t end_mark_bit_index = chunk_->AddressToMarkbitIndex(end);
          unsigned int end_cell_index =
              end_mark_bit_index >> Bitmap::kBitsPerCellLog2;
          MarkBit::CellType end_index_mask =
              1u << Bitmap::IndexInCell(end_mark_bit_index);
          if (it_.Advance(end_cell_index)) {
            cell_base_ = it_.CurrentCellBase();
            current_cell_ = *it_.CurrentCell();
          }

          // Clear all bits in current_cell, including the end index.
          current_cell_ &= ~(end_index_mask + end_index_mask - 1);
        }

        if (mode == kBlackObjects || mode == kAllLiveObjects) {
          object = black_object;
        }
      } else if ((mode == kGreyObjects || mode == kAllLiveObjects)) {
        map =
            base::AsAtomicPointer::Relaxed_Load(reinterpret_cast<Map**>(addr));
        object = HeapObject::FromAddress(addr);
        size = object->SizeFromMap(map);
      }

      // We found a live object.
      if (object != nullptr) {
        // Do not use IsFiller() here. This may cause a data race for reading
        // out the instance type when a new map concurrently is written into
        // this object while iterating over the object.
        if (map == one_word_filler_map_ || map == two_word_filler_map_ ||
            map == free_space_map_) {
          // There are two reasons why we can get black or grey fillers:
          // 1) Black areas together with slack tracking may result in black one
          // word filler objects.
          // 2) Left trimming may leave black or grey fillers behind because we
          // do not clear the old location of the object start.
          // We filter these objects out in the iterator.
          object = nullptr;
        } else {
          break;
        }
      }
    }

    // Current cell exhausted: move to the next cell, if any.
    if (current_cell_ == 0) {
      if (it_.Advance()) {
        cell_base_ = it_.CurrentCellBase();
        current_cell_ = *it_.CurrentCell();
      }
    }
    if (object != nullptr) {
      current_object_ = object;
      current_size_ = size;
      return;
    }
  }
  // No more live objects on this chunk.
  current_object_ = nullptr;
}
546 
// Returns an iterator positioned at the first live object in the range.
template <LiveObjectIterationMode mode>
typename LiveObjectRange<mode>::iterator LiveObjectRange<mode>::begin() {
  return iterator(chunk_, bitmap_, start_);
}
551 
// Returns the past-the-end iterator for the range.
template <LiveObjectIterationMode mode>
typename LiveObjectRange<mode>::iterator LiveObjectRange<mode>::end() {
  return iterator(chunk_, bitmap_, end_);
}
556 
isolate()557 Isolate* MarkCompactCollectorBase::isolate() { return heap()->isolate(); }
558 
559 }  // namespace internal
560 }  // namespace v8
561 
562 #endif  // V8_HEAP_MARK_COMPACT_INL_H_
563