// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/heap/scavenger.h"

#include "src/contexts.h"
#include "src/heap/heap.h"
#include "src/heap/objects-visiting-inl.h"
#include "src/heap/scavenger-inl.h"
#include "src/isolate.h"
#include "src/log.h"
#include "src/profiler/cpu-profiler.h"

namespace v8 {
namespace internal {

enum LoggingAndProfiling {
  LOGGING_AND_PROFILING_ENABLED,
  LOGGING_AND_PROFILING_DISABLED
};


enum MarksHandling { TRANSFER_MARKS, IGNORE_MARKS };


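// A static visitor that evacuates objects out of from-space during a
// scavenge. It is instantiated in four flavors (see Scavenger::Initialize):
// with or without incremental-marking mark transfer (TRANSFER_MARKS vs.
// IGNORE_MARKS), and with logging/profiling hooks enabled or disabled, so
// the common fast path pays for neither.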
template <MarksHandling marks_handling,
          LoggingAndProfiling logging_and_profiling_mode>
class ScavengingVisitor : public StaticVisitorBase {
 public:
  static void Initialize() {
    table_.Register(kVisitSeqOneByteString, &EvacuateSeqOneByteString);
    table_.Register(kVisitSeqTwoByteString, &EvacuateSeqTwoByteString);
    table_.Register(kVisitShortcutCandidate, &EvacuateShortcutCandidate);
    table_.Register(kVisitByteArray, &EvacuateByteArray);
    table_.Register(kVisitFixedArray, &EvacuateFixedArray);
    table_.Register(kVisitFixedDoubleArray, &EvacuateFixedDoubleArray);
    table_.Register(kVisitFixedTypedArray, &EvacuateFixedTypedArray);
    table_.Register(kVisitFixedFloat64Array, &EvacuateFixedFloat64Array);
    table_.Register(kVisitJSArrayBuffer, &EvacuateJSArrayBuffer);

    table_.Register(
        kVisitNativeContext,
        &ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized<
            Context::kSize>);

    table_.Register(
        kVisitConsString,
        &ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized<
            ConsString::kSize>);

    table_.Register(
        kVisitSlicedString,
        &ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized<
            SlicedString::kSize>);

    table_.Register(
        kVisitSymbol,
        &ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized<
            Symbol::kSize>);

    table_.Register(
        kVisitSharedFunctionInfo,
        &ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized<
            SharedFunctionInfo::kSize>);

    table_.Register(kVisitJSWeakCollection,
                    &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit);

    table_.Register(kVisitJSRegExp,
                    &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit);

    table_.Register(kVisitJSFunction, &EvacuateJSFunction);

    table_.RegisterSpecializations<ObjectEvacuationStrategy<DATA_OBJECT>,
                                   kVisitDataObject, kVisitDataObjectGeneric>();

    table_.RegisterSpecializations<ObjectEvacuationStrategy<POINTER_OBJECT>,
                                   kVisitJSObject, kVisitJSObjectGeneric>();

    table_.RegisterSpecializations<ObjectEvacuationStrategy<POINTER_OBJECT>,
                                   kVisitStruct, kVisitStructGeneric>();
  }

  static VisitorDispatchTable<ScavengingCallback>* GetTable() {
    return &table_;
  }

 private:
  enum ObjectContents { DATA_OBJECT, POINTER_OBJECT };

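  // Records allocation/promotion statistics for a freshly copied object when
  // --log-gc (or heap stats in debug builds) is enabled.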
  static void RecordCopiedObject(Heap* heap, HeapObject* obj) {
    bool should_record = false;
#ifdef DEBUG
    should_record = FLAG_heap_stats;
#endif
    should_record = should_record || FLAG_log_gc;
    if (should_record) {
      if (heap->new_space()->Contains(obj)) {
        heap->new_space()->RecordAllocation(obj);
      } else {
        heap->new_space()->RecordPromotion(obj);
      }
    }
  }

  // Helper function used by SemiSpaceCopyObject and PromoteObject to copy a
  // source object to an allocated target object and to update the forwarding
  // pointer in the source object.
  INLINE(static void MigrateObject(Heap* heap, HeapObject* source,
                                   HeapObject* target, int size)) {
    // If we migrate into to-space, then the to-space top pointer should be
    // right after the target object. Incorporate double alignment
    // over-allocation.
    DCHECK(!heap->InToSpace(target) ||
           target->address() + size == heap->new_space()->top() ||
           target->address() + size + kPointerSize == heap->new_space()->top());

    // Make sure that we do not overwrite the promotion queue which is at
    // the end of to-space.
    DCHECK(!heap->InToSpace(target) ||
           heap->promotion_queue()->IsBelowPromotionQueue(
               heap->new_space()->top()));

    // Copy the content of source to target.
    heap->CopyBlock(target->address(), source->address(), size);

    // Set the forwarding address.
    source->set_map_word(MapWord::FromForwardingAddress(target));

    if (logging_and_profiling_mode == LOGGING_AND_PROFILING_ENABLED) {
      // Update NewSpace stats if necessary.
      RecordCopiedObject(heap, target);
      heap->OnMoveEvent(target, source, size);
    }

    if (marks_handling == TRANSFER_MARKS) {
      if (Marking::TransferColor(source, target)) {
        MemoryChunk::IncrementLiveBytesFromGC(target, size);
      }
    }
  }

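  // Attempts to copy |object| into to-space. Returns false if allocation in
  // to-space fails, e.g. due to fragmentation.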
  template <AllocationAlignment alignment>
  static inline bool SemiSpaceCopyObject(Map* map, HeapObject** slot,
                                         HeapObject* object, int object_size) {
    Heap* heap = map->GetHeap();

    DCHECK(heap->AllowedToBeMigrated(object, NEW_SPACE));
    AllocationResult allocation =
        heap->new_space()->AllocateRaw(object_size, alignment);

    HeapObject* target = NULL;  // Initialization to please compiler.
    if (allocation.To(&target)) {
      // Order is important here: Set the promotion limit before storing a
      // filler for double alignment or migrating the object. Otherwise we
      // may end up overwriting promotion queue entries when we migrate the
      // object.
      heap->promotion_queue()->SetNewLimit(heap->new_space()->top());

      MigrateObject(heap, object, target, object_size);

      // Update slot to new target.
      *slot = target;

      heap->IncrementSemiSpaceCopiedObjectSize(object_size);
      return true;
    }
    return false;
  }


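  // Attempts to move |object| into old space. Pointer-containing objects are
  // additionally pushed onto the promotion queue so their slots can be
  // rescanned later. Returns false if allocation in old space fails.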
  template <ObjectContents object_contents, AllocationAlignment alignment>
  static inline bool PromoteObject(Map* map, HeapObject** slot,
                                   HeapObject* object, int object_size) {
    Heap* heap = map->GetHeap();

    AllocationResult allocation =
        heap->old_space()->AllocateRaw(object_size, alignment);

    HeapObject* target = NULL;  // Initialization to please compiler.
    if (allocation.To(&target)) {
      MigrateObject(heap, object, target, object_size);

      // Update slot to new target.
      *slot = target;

      if (object_contents == POINTER_OBJECT) {
        heap->promotion_queue()->insert(target, object_size);
      }
      heap->IncrementPromotedObjectsSize(object_size);
      return true;
    }
    return false;
  }


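  // Evacuates |object|: objects the heap does not yet consider old enough to
  // promote are copied within new space first, everything else goes straight
  // to old space. Either step may fail under memory pressure, in which case
  // the other is used as a fallback; if both fail, the scavenge aborts.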
  template <ObjectContents object_contents, AllocationAlignment alignment>
  static inline void EvacuateObject(Map* map, HeapObject** slot,
                                    HeapObject* object, int object_size) {
    SLOW_DCHECK(object_size <= Page::kAllocatableMemory);
    SLOW_DCHECK(object->Size() == object_size);
    Heap* heap = map->GetHeap();

    if (!heap->ShouldBePromoted(object->address(), object_size)) {
      // A semi-space copy may fail due to fragmentation. In that case, we
      // try to promote the object.
      if (SemiSpaceCopyObject<alignment>(map, slot, object, object_size)) {
        return;
      }
    }

    if (PromoteObject<object_contents, alignment>(map, slot, object,
                                                  object_size)) {
      return;
    }

    // If promotion failed, we try to copy the object to the other semi-space.
    if (SemiSpaceCopyObject<alignment>(map, slot, object, object_size)) return;

    FatalProcessOutOfMemory("Scavenger: semi-space copy\n");
  }


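  // Evacuates a JSFunction and, when marks are transferred, records the code
  // entry slot of an already-black target so the compactor does not miss it.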
  static inline void EvacuateJSFunction(Map* map, HeapObject** slot,
                                        HeapObject* object) {
    ObjectEvacuationStrategy<POINTER_OBJECT>::Visit(map, slot, object);

    if (marks_handling == IGNORE_MARKS) return;

    MapWord map_word = object->map_word();
    DCHECK(map_word.IsForwardingAddress());
    HeapObject* target = map_word.ToForwardingAddress();

    MarkBit mark_bit = Marking::MarkBitFrom(target);
    if (Marking::IsBlack(mark_bit)) {
      // This object is black and it might not be rescanned by the marker.
      // We should explicitly record the code entry slot for compaction
      // because promotion queue processing (IterateAndMarkPointersToFromSpace)
      // will miss it as it is not HeapObject-tagged.
      Address code_entry_slot =
          target->address() + JSFunction::kCodeEntryOffset;
      Code* code = Code::cast(Code::GetObjectFromEntryAddress(code_entry_slot));
      map->GetHeap()->mark_compact_collector()->RecordCodeEntrySlot(
          target, code_entry_slot, code);
    }
  }


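  // The Evacuate* helpers below compute the object size from the type's
  // length or size field and delegate to EvacuateObject with the matching
  // ObjectContents and alignment.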
  static inline void EvacuateFixedArray(Map* map, HeapObject** slot,
                                        HeapObject* object) {
    int length = reinterpret_cast<FixedArray*>(object)->synchronized_length();
    int object_size = FixedArray::SizeFor(length);
    EvacuateObject<POINTER_OBJECT, kWordAligned>(map, slot, object,
                                                 object_size);
  }


  static inline void EvacuateFixedDoubleArray(Map* map, HeapObject** slot,
                                              HeapObject* object) {
    int length = reinterpret_cast<FixedDoubleArray*>(object)->length();
    int object_size = FixedDoubleArray::SizeFor(length);
    EvacuateObject<DATA_OBJECT, kDoubleAligned>(map, slot, object, object_size);
  }


  static inline void EvacuateFixedTypedArray(Map* map, HeapObject** slot,
                                             HeapObject* object) {
    int object_size = reinterpret_cast<FixedTypedArrayBase*>(object)->size();
    EvacuateObject<POINTER_OBJECT, kWordAligned>(map, slot, object,
                                                 object_size);
  }


  static inline void EvacuateFixedFloat64Array(Map* map, HeapObject** slot,
                                               HeapObject* object) {
    int object_size = reinterpret_cast<FixedFloat64Array*>(object)->size();
    EvacuateObject<POINTER_OBJECT, kDoubleAligned>(map, slot, object,
                                                   object_size);
  }


  static inline void EvacuateJSArrayBuffer(Map* map, HeapObject** slot,
                                           HeapObject* object) {
    ObjectEvacuationStrategy<POINTER_OBJECT>::Visit(map, slot, object);

    Heap* heap = map->GetHeap();
    MapWord map_word = object->map_word();
    DCHECK(map_word.IsForwardingAddress());
    HeapObject* target = map_word.ToForwardingAddress();
    if (!heap->InNewSpace(target)) {
      heap->array_buffer_tracker()->Promote(JSArrayBuffer::cast(target));
    }
  }


  static inline void EvacuateByteArray(Map* map, HeapObject** slot,
                                       HeapObject* object) {
    int object_size = reinterpret_cast<ByteArray*>(object)->ByteArraySize();
    EvacuateObject<DATA_OBJECT, kWordAligned>(map, slot, object, object_size);
  }


  static inline void EvacuateSeqOneByteString(Map* map, HeapObject** slot,
                                              HeapObject* object) {
    int object_size = SeqOneByteString::cast(object)
                          ->SeqOneByteStringSize(map->instance_type());
    EvacuateObject<DATA_OBJECT, kWordAligned>(map, slot, object, object_size);
  }


  static inline void EvacuateSeqTwoByteString(Map* map, HeapObject** slot,
                                              HeapObject* object) {
    int object_size = SeqTwoByteString::cast(object)
                          ->SeqTwoByteStringSize(map->instance_type());
    EvacuateObject<DATA_OBJECT, kWordAligned>(map, slot, object, object_size);
  }


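  // A cons string whose second component is the empty string can be
  // short-circuited: the slot is updated to point at the first component
  // directly and no new object is allocated. This is only done while
  // incremental marking is off (IGNORE_MARKS).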
  static inline void EvacuateShortcutCandidate(Map* map, HeapObject** slot,
                                               HeapObject* object) {
    DCHECK(IsShortcutCandidate(map->instance_type()));

    Heap* heap = map->GetHeap();

    if (marks_handling == IGNORE_MARKS &&
        ConsString::cast(object)->unchecked_second() == heap->empty_string()) {
      HeapObject* first =
          HeapObject::cast(ConsString::cast(object)->unchecked_first());

      *slot = first;

      if (!heap->InNewSpace(first)) {
        object->set_map_word(MapWord::FromForwardingAddress(first));
        return;
      }

      MapWord first_word = first->map_word();
      if (first_word.IsForwardingAddress()) {
        HeapObject* target = first_word.ToForwardingAddress();

        *slot = target;
        object->set_map_word(MapWord::FromForwardingAddress(target));
        return;
      }

      Scavenger::ScavengeObjectSlow(slot, first);
      object->set_map_word(MapWord::FromForwardingAddress(*slot));
      return;
    }

    int object_size = ConsString::kSize;
    EvacuateObject<POINTER_OBJECT, kWordAligned>(map, slot, object,
                                                 object_size);
  }

  template <ObjectContents object_contents>
  class ObjectEvacuationStrategy {
   public:
    template <int object_size>
    static inline void VisitSpecialized(Map* map, HeapObject** slot,
                                        HeapObject* object) {
      EvacuateObject<object_contents, kWordAligned>(map, slot, object,
                                                    object_size);
    }

    static inline void Visit(Map* map, HeapObject** slot, HeapObject* object) {
      int object_size = map->instance_size();
      EvacuateObject<object_contents, kWordAligned>(map, slot, object,
                                                    object_size);
    }
  };

  static VisitorDispatchTable<ScavengingCallback> table_;
};


template <MarksHandling marks_handling,
          LoggingAndProfiling logging_and_profiling_mode>
VisitorDispatchTable<ScavengingCallback>
    ScavengingVisitor<marks_handling, logging_and_profiling_mode>::table_;


// static
void Scavenger::Initialize() {
  ScavengingVisitor<TRANSFER_MARKS,
                    LOGGING_AND_PROFILING_DISABLED>::Initialize();
  ScavengingVisitor<IGNORE_MARKS, LOGGING_AND_PROFILING_DISABLED>::Initialize();
  ScavengingVisitor<TRANSFER_MARKS,
                    LOGGING_AND_PROFILING_ENABLED>::Initialize();
  ScavengingVisitor<IGNORE_MARKS, LOGGING_AND_PROFILING_ENABLED>::Initialize();
}


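// Resolves |object|, which must be in from-space and not yet have a
// forwarding address, by dispatching to the scavenging visitor selected for
// the current GC cycle.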
// static
void Scavenger::ScavengeObjectSlow(HeapObject** p, HeapObject* object) {
  SLOW_DCHECK(object->GetIsolate()->heap()->InFromSpace(object));
  MapWord first_word = object->map_word();
  SLOW_DCHECK(!first_word.IsForwardingAddress());
  Map* map = first_word.ToMap();
  Scavenger* scavenger = map->GetHeap()->scavenge_collector_;
  scavenger->scavenging_visitors_table_.GetVisitor(map)(map, p, object);
}


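// Picks the visitor table matching the current heap state: marks are
// transferred only while incremental marking is running, and the
// logging/profiling flavor is used only when some observer needs to see
// object-move events.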
void Scavenger::SelectScavengingVisitorsTable() {
  bool logging_and_profiling =
      FLAG_verify_predictable || isolate()->logger()->is_logging() ||
      isolate()->cpu_profiler()->is_profiling() ||
      (isolate()->heap_profiler() != NULL &&
       isolate()->heap_profiler()->is_tracking_object_moves());

  if (!heap()->incremental_marking()->IsMarking()) {
    if (!logging_and_profiling) {
      scavenging_visitors_table_.CopyFrom(
          ScavengingVisitor<IGNORE_MARKS,
                            LOGGING_AND_PROFILING_DISABLED>::GetTable());
    } else {
      scavenging_visitors_table_.CopyFrom(
          ScavengingVisitor<IGNORE_MARKS,
                            LOGGING_AND_PROFILING_ENABLED>::GetTable());
    }
  } else {
    if (!logging_and_profiling) {
      scavenging_visitors_table_.CopyFrom(
          ScavengingVisitor<TRANSFER_MARKS,
                            LOGGING_AND_PROFILING_DISABLED>::GetTable());
    } else {
      scavenging_visitors_table_.CopyFrom(
          ScavengingVisitor<TRANSFER_MARKS,
                            LOGGING_AND_PROFILING_ENABLED>::GetTable());
    }

    if (heap()->incremental_marking()->IsCompacting()) {
      // When compacting, forbid short-circuiting of cons strings: the
      // scavenging code relies on the fact that a new-space object can't be
      // evacuated into an evacuation candidate, and short-circuiting would
      // violate this assumption.
      scavenging_visitors_table_.Register(
          StaticVisitorBase::kVisitShortcutCandidate,
          scavenging_visitors_table_.GetVisitorById(
              StaticVisitorBase::kVisitConsString));
    }
  }
}


Isolate* Scavenger::isolate() { return heap()->isolate(); }


void ScavengeVisitor::VisitPointer(Object** p) { ScavengePointer(p); }


void ScavengeVisitor::VisitPointers(Object** start, Object** end) {
  // Copy all HeapObject pointers in [start, end).
  for (Object** p = start; p < end; p++) ScavengePointer(p);
}


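// Scavenges a single slot: pointers outside new space are left untouched;
// everything else is forwarded (and copied if necessary) via ScavengeObject.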
void ScavengeVisitor::ScavengePointer(Object** p) {
  Object* object = *p;
  if (!heap_->InNewSpace(object)) return;
  Scavenger::ScavengeObject(reinterpret_cast<HeapObject**>(p),
                            reinterpret_cast<HeapObject*>(object));
}

}  // namespace internal
}  // namespace v8